Example #1
 public void Draw(float frameTime, Frustum frustum)
 {
     Gl.glPushMatrix();
     Gl.glTranslatef(position.x, position.y, position.z);
     Gl.glScalef(scaleX, scaleY, scaleZ);
     Gl.glRotatef(rotDeg, rotX, rotY, rotZ);
     Gl.glCallList(list);
     Gl.glPopMatrix();
 }
Example #2
 public override void UpdateViewMatrix()
 {
     ViewMatrix = Matrix.LookAtLH(Position, Target, Up);
     _frustum   = Frustum.FromViewProjection(ViewProjectionMatrix);
 }
Example #3
 public bool IsInside(Frustum frustum)
 {
     return(GlobalSphere.Radius == 0 || frustum.Contains(GlobalSphere));
 }
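Taken together, Examples #1 and #3 suggest the usual culling pattern: test each object's bounding volume against the frustum and only issue the draw call when the test passes. A minimal sketch, assuming a hypothetical SceneObject type that exposes the IsInside and Draw methods shown above:

 // Sketch only: SceneObject and the objects collection are assumptions for illustration.
 public void DrawVisible(float frameTime, Frustum frustum, IEnumerable<SceneObject> objects)
 {
     foreach (var obj in objects)
     {
         // Skip anything whose bounding sphere lies outside the view frustum.
         if (!obj.IsInside(frustum))
         {
             continue;
         }
         obj.Draw(frameTime, frustum);
     }
 }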
Example #4
        public bool Initialize(SystemConfiguration configuration, IntPtr windowHandle)
        {
            try
            {
                // Create the Direct3D object.
                D3D = new DX11();
                // Initialize the Direct3D object.
                if (!D3D.Initialize(configuration, windowHandle))
                {
                    MessageBox.Show("Could not initialize Direct3D", "Error", MessageBoxButtons.OK);
                    return(false);
                }

                // Create the camera object
                Camera = new Camera();

                // Initialize a base view matrix with the camera for 2D user interface rendering.
                Camera.SetPosition(0, 0, -1);
                Camera.Render();
                var baseViewMatrix = Camera.ViewMatrix;

                // Create the text object.
                Text = new Text();
                if (!Text.Initialize(D3D.Device, D3D.DeviceContext, windowHandle, configuration.Width, configuration.Height, baseViewMatrix))
                {
                    MessageBox.Show("Could not initialize the text object", "Error", MessageBoxButtons.OK);
                    return(false);
                }

                // Create the model class.
                Model = new Model();

                // Initialize the model object.
                if (!Model.Initialize(D3D.Device, "sphere.txt", new[] { "stone01.dds", "light01.dds" }))
                {
                    MessageBox.Show("Could not initialize the model object", "Error", MessageBoxButtons.OK);
                    return(false);
                }

                // Create the light shader object.
                LightMapShader = new LightMapShader();

                // Initialize the light shader object.
                if (!LightMapShader.Initialize(D3D.Device, windowHandle))
                {
                    MessageBox.Show("Could not initialize the light shader", "Error", MessageBoxButtons.OK);
                    return(false);
                }

                // Create the light object.
                Light = new Light();

                // Initialize the light object.
                Light.SetAmbientColor(0.15f, 0.15f, 0.15f, 1.0f);
                Light.SetDiffuseColor(1, 0, 0, 1f);
                Light.SetDirection(1, 0, 1);
                Light.SetSpecularColor(0, 1, 1, 1);
                Light.SetSpecularPower(32);

                // Create the model list object.
                ModelList = new ModelList();

                // Initialize the model list object.
                if (!ModelList.Initialize(25))
                {
                    MessageBox.Show("Could not initialize the model list object", "Error", MessageBoxButtons.OK);
                    return(false);
                }

                // Create the frustum object.
                Frustum = new Frustum();

                return(true);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Could not initialize Direct3D\nError is '" + ex.Message + "'");
                return(false);
            }
        }
Example #5
 public override bool Visible(BoundingBox box)
 {
     return(Frustum.Intersect(box) > 0 && SmallerCamera?._innerCamera.Intersect(box) != FrustrumIntersectionType.Inside);
 }
Example #6
 public bool BoundingVolumeIsInView(BoundingSphere sphere)
 {
     return(Frustum.Contains(sphere) != ContainmentType.Disjoint);
 }
Example #7
 public void TraceFrustumInScene(Matrix viewProjection)
 {
     Frustum frustum = new Frustum(viewProjection);
     ChemEntity[] entities = sceneGraph.GetAllInsideFrustum(frustum);
     if (entities != null)
     {
         foreach (ChemEntity entity in entities)
         {
             AtomSelectionEntity selEntity = new AtomSelectionEntity((AtomEntity)entity);
             selEntity.Init(device);
             postSceneViewEntities.Add(selEntity);
         }
     }
 }
Example #8
        ConvexPolyhedron GetConvexPolyhedronFromFrustum( ref Frustum frustum )
        {
            Vec3[] frustumPoints = null;
            frustum.ToPoints( ref frustumPoints );

            Vec3[] points = new Vec3[ 5 ];
            points[ 0 ] = frustum.Origin;
            points[ 1 ] = frustumPoints[ 4 ];
            points[ 2 ] = frustumPoints[ 5 ];
            points[ 3 ] = frustumPoints[ 6 ];
            points[ 4 ] = frustumPoints[ 7 ];

            ConvexPolyhedron.Face[] faces = new ConvexPolyhedron.Face[ 6 ];
            faces[ 0 ] = new ConvexPolyhedron.Face( 0, 1, 2 );
            faces[ 1 ] = new ConvexPolyhedron.Face( 0, 2, 3 );
            faces[ 2 ] = new ConvexPolyhedron.Face( 0, 3, 4 );
            faces[ 3 ] = new ConvexPolyhedron.Face( 0, 4, 1 );
            faces[ 4 ] = new ConvexPolyhedron.Face( 1, 3, 2 );
            faces[ 5 ] = new ConvexPolyhedron.Face( 3, 1, 4 );

            return new ConvexPolyhedron( points, faces, .0001f );
        }
Example #9
 // Function for determining if sphere is in view
 public bool BoundingVolumeIsInView(BoundingSphere sphere)
 {
     // The sphere is in view unless it is completely outside (disjoint from) the frustum
     return(Frustum.Contains(sphere) != ContainmentType.Disjoint);
 }
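Examples #6, #9, #18 and #23 all rely on the three-valued ContainmentType returned by Frustum.Contains: Disjoint (fully outside), Intersects (partially inside) and Contains (fully inside). When the difference between "partially" and "fully" inside matters, for instance to stop re-testing children once a parent volume is fully contained, the result can be switched on directly. A minimal sketch, with the node type and its members assumed:

 // Sketch only: Node, Bounds, Children and Visible are assumptions for illustration.
 void CullRecursive(Node node, bool parentFullyInside)
 {
     var containment = parentFullyInside
         ? ContainmentType.Contains                // parent fully inside, no need to re-test
         : Frustum.Contains(node.Bounds);

     if (containment == ContainmentType.Disjoint)
     {
         return; // the whole subtree is outside the frustum
     }

     node.Visible = true;
     foreach (var child in node.Children)
     {
         CullRecursive(child, containment == ContainmentType.Contains);
     }
 }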
Example #10
 internal Frustum GetFrustum(float aspect)
 => Frustum.CreateFromVerticalAngleAndAspect(
     VerticalFov,
     aspect,
     NEAR_CLIP_DISTANCE,
     FAR_CLIP_DISTANCE);
Example #11
        protected override void DoRender()
        {
            // Early exit
            if (Lights.Count == 0)
            {
                return;
            }

            // Retrieve device context
            var context = this.DeviceManager.Direct3DContext;

            // backup existing context state
            int       oldStencilRef = 0;
            RawColor4 oldBlendFactor;
            int       oldSampleMaskRef;

            using (var oldVertexLayout = context.InputAssembler.InputLayout)
                using (var oldPixelShader = context.PixelShader.Get())
                    using (var oldVertexShader = context.VertexShader.Get())
                        using (var oldBlendState = context.OutputMerger.GetBlendState(out oldBlendFactor, out oldSampleMaskRef))
                            using (var oldDepthState = context.OutputMerger.GetDepthStencilState(out oldStencilRef))
                                using (var oldRSState = context.Rasterizer.State)
                                {
                                    // Assign shader resources - TODO: create array in CreateDeviceDependentResources instead
                                    context.PixelShader.SetShaderResources(0, gbuffer.SRVs.ToArray().Concat(new[] { gbuffer.DSSRV }).ToArray());

                                    // Assign the additive blend state
                                    context.OutputMerger.BlendState = blendStateAdd;

                                    // Retrieve camera parameters
                                    SharpDX.FrustumCameraParams cameraParams = Frustum.GetCameraParams();

                                    // For each configured light
                                    for (var i = 0; i < Lights.Count; i++)
                                    {
                                        PerLight light = Lights[i];

                                        PixelShader shader = null; // Assign shader
                                        if (light.Type == LightType.Ambient)
                                        {
                                            shader = psAmbientLight;
                                        }
                                        else if (light.Type == LightType.Directional)
                                        {
                                            shader = psDirectionalLight;
                                        }
                                        else if (light.Type == LightType.Point)
                                        {
                                            shader = psPointLight;
                                        }
                                        //else if (light.Type == LightType.Spot)
                                        //    shader = psSpotLight;

                                        // Update the perLight constant buffer
                                        // Calculate view space position (for frustum checks)
                                        Vector3 lightDir     = Vector3.Normalize(Lights[i].Direction);
                                        Vector4 viewSpaceDir = Vector4.Transform(Vector3.Transform(lightDir, PerObject.World), PerObject.View);
                                        light.Direction = new Vector3(viewSpaceDir.X, viewSpaceDir.Y, viewSpaceDir.Z);
                                        Vector4 viewSpacePos = Vector4.Transform(Vector3.Transform(Lights[i].Position, PerObject.World), PerObject.View);
                                        light.Position = new Vector3(viewSpacePos.X, viewSpacePos.Y, viewSpacePos.Z);

                                        context.UpdateSubresource(ref light, perLightBuffer);
                                        context.PixelShader.SetConstantBuffer(4, perLightBuffer);

                                        light.Position  = Lights[i].Position;
                                        light.Direction = Lights[i].Direction;

                                        // Check if the light should be considered full screen
                                        bool isFullScreen = light.Type == LightType.Directional ||
                                                            light.Type == LightType.Ambient;
                                        if (!isFullScreen)
                                        {
                                            isFullScreen = (cameraParams.ZNear > viewSpacePos.Z - light.Range &&
                                                            cameraParams.ZFar < viewSpacePos.Z + light.Range);
                                        }
                                        if (isFullScreen)
                                        {
                                            context.OutputMerger.DepthStencilState = depthDisabled;
                                            // Use SAQuad
                                            saQuad.ShaderResources = null;
                                            saQuad.Shader          = shader;
                                            saQuad.Render();
                                        }
                                        else // Render volume
                                        {
                                            context.PixelShader.Set(shader);
                                            context.VertexShader.Set(vertexShader);

                                            Matrix world = Matrix.Identity;

                                             MeshRenderer volume = null;
                                            switch (light.Type)
                                            {
                                            case LightType.Point:
                                                // Prepare world matrix
                                                // Ensure no abrupt light edges with +50%
                                                world.ScaleVector = Vector3.One * light.Range * 2f;
                                                volume            = pointLightVolume;
                                                break;

                                            /* TODO: Spot light support
                                             * case LightType.Spot:
                                             *  // Determine rotation!
                                             *  var D = Vector3.Normalize(light.Direction);
                                             *  var s1 = Vector3.Cross(D, Vector3.UnitZ);
                                             *  var s2 = Vector3.Cross(D, Vector3.UnitY);
                                             *  Vector3 S;
                                             *  if (s1.LengthSquared() > s2.LengthSquared())
                                             *      S = s1;
                                             *  else
                                             *      S = s2;
                                             *  var U = Vector3.Cross(D, S);
                                             *  Matrix rotate = Matrix.Identity;
                                             *  rotate.Forward = D;
                                             *  rotate.Down = U;
                                             *  rotate.Left = S;
                                             *
                                             *  float scaleZ = light.Range;
                                             *  // Need to Abs - if negative it will invert our model and result in incorrect normals
                                             *  float scaleXY = light.Range * Math.Abs((float)Math.Tan(Math.Acos(light.SpotOuterCosine*2)/2));
                                             *
                                             *  world.ScaleVector = new Vector3(scaleXY, scaleXY, scaleZ);
                                             *  world *= rotate;
                                             *  volume = spotLightVolume;
                                             *  break;
                                             * */
                                            default:
                                                continue;
                                            }
                                            world.TranslationVector = light.Position;
                                            volume.World            = world;
                                            // Transpose the PerObject matrices
                                            var transposed = PerObject;
                                            transposed.World = volume.World * PerObject.World;
                                            transposed.WorldViewProjection = transposed.World * PerObject.ViewProjection;
                                            transposed.Transpose();
                                            context.UpdateSubresource(ref transposed, PerObjectBuffer);

                                            if (cameraParams.ZFar < viewSpacePos.Z + light.Range)
                                            {
                                                // Cull the back face and only render where there is something
                                                // behind the front face.
                                                context.Rasterizer.State = rsCullBack;
                                                context.OutputMerger.DepthStencilState = depthLessThan;
                                            }
                                            else
                                            {
                                                // Cull front faces and only render where there is something
                                                // before the back face.
                                                context.Rasterizer.State = rsCullFront;
                                                context.OutputMerger.DepthStencilState = depthGreaterThan;
                                            }
                                            volume.Render();

                                            // Show the light volumes for debugging
                                            if (Debug > 0)
                                            {
                                                if (Debug == 1)
                                                {
                                                    context.OutputMerger.SetDepthStencilState(depthGreaterThan);
                                                }
                                                else
                                                {
                                                    context.OutputMerger.SetDepthStencilState(depthLessThan);
                                                }
                                                context.PixelShader.Set(psDebugLight);
                                                context.Rasterizer.State = rsWireframe;
                                                volume.Render();
                                            }
                                        }
                                    }

                                    // Reset pixel shader resources (all to null)
                                    context.PixelShader.SetShaderResources(0, new ShaderResourceView[gbuffer.SRVs.Count + 1]);

                                    // Restore context states
                                    context.PixelShader.Set(oldPixelShader);
                                    context.VertexShader.Set(oldVertexShader);
                                    context.InputAssembler.InputLayout = oldVertexLayout;
                                    context.OutputMerger.SetBlendState(oldBlendState, oldBlendFactor, oldSampleMaskRef);
                                    context.OutputMerger.SetDepthStencilState(oldDepthState, oldStencilRef);
                                    context.Rasterizer.State = oldRSState;
                                }
        }
Example #12
    /// <summary>
    /// Returns a list of all points in viewField, using oc for occlusion approximation.
    /// Will update occluded and notInView to lists of vertices in those categories.
    /// NOTE: clearing the lists and resetting metadata must be done BEFORE calling this method.
    /// </summary>
    public List <Vector3> AllInView(List <SurfacePoints> extras, List <bool> meshGuide, Frustum viewField, Occlusion oc)
    {
        for (int i = 0; i < extras.Count; i++)
        {
            // check visibility
            if (meshGuide[i])
            {
                foreach (Vector3 pt in extras[i].Wvertices)
                {
                    Vector3    Pvec = Vector(viewField.Transform.position, pt);
                    ViewVector vv   = viewField.ViewVec(pt);

                    if (viewField.FOV.Contains(vv))
                    {
                        Vector2 coords = vv.Map <Occlusion.OcclusionCell>(oc.grid, viewField.FOV);

                        if (oc.grid[(int)coords.x, (int)coords.y].nullCell ||
                            Pvec.magnitude < oc.grid[(int)coords.x, (int)coords.y].distance)
                        {
                            oc.grid[(int)coords.x, (int)coords.y].closest  = pt;
                            oc.grid[(int)coords.x, (int)coords.y].distance = Pvec.magnitude;
                            oc.grid[(int)coords.x, (int)coords.y].nullCell = false;
                            NonOccludedVertices++;
                        }
                        VerticesInView++;
                    }
                    CheckedVertices++;
                }
            }
        }
        return(oc.Points());
    }
Example #13
    /// <summary>
    /// Returns list of PointValues representing raster values projected onto visible points from viewField.
    /// </summary>
    public List <PointValue <T> > ProjectToVisible <T>(List <SurfacePoints> extras, List <bool> meshGuide, Frustum viewField, Occlusion oc, T[,] raster)
    {
        // reset metadata
        CheckedVertices     = 0;
        VerticesInView      = 0;
        NonOccludedVertices = 0;

        // gather visible vertices from all meshes
        List <Vector3> visible = AllInView(extras, meshGuide, viewField, oc);

        // project to visible
        return(Project <T>(visible, viewField, raster));
    }
Example #14
 /// <summary>
 ///     Adds all sections inside of the frustum to the render list.
 /// </summary>
 /// <param name="frustum">The view frustum to use for culling.</param>
 /// <param name="renderList">The list to add the chunks and positions too.</param>
 public void AddCulledToRenderList(Frustum frustum, List <(ClientSection section, Vector3 position)> renderList)
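Only the signature and documentation of AddCulledToRenderList are shown above. A possible body, following the same containment test used throughout these examples (the sections collection, the Bounds property and the exact Frustum overload are assumptions):

 // Sketch only: sections, Bounds and the bool-returning Contains overload are assumptions.
 public void AddCulledToRenderList(Frustum frustum, List <(ClientSection section, Vector3 position)> renderList)
 {
     foreach (var (section, position) in sections)
     {
         if (frustum.Contains(section.Bounds)) // keep only sections inside the view frustum
         {
             renderList.Add((section, position));
         }
     }
 }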
Example #15
 /// <summary>
 /// The visibility of a bounding box in local space, in a view frustum defined in deformed space.
 /// </summary>
 /// <param name="node">
 /// A TerrainNode. This node is used to get the camera position in local and deformed space with
 /// <code>TerrainNode.GetLocalCamera</code> and <code>TerrainNode.GetDeformedCamera</code>,
 /// as well as the view frustum planes in deformed space with <code>TerrainNode.GetDeformedFrustumPlanes</code>.
 /// </param>
 /// <param name="localBox">a bounding box in local space.</param>
 /// <returns>Returns the visibility of a bounding box in local space, in a view frustum defined in deformed space.</returns>
 public virtual Frustum.VISIBILITY GetVisibility(TerrainNode node, Box3d localBox)
 {
     // localBox = deformedBox, so we can compare the deformed frustum with it
     return(Frustum.GetVisibility(node.DeformedFrustumPlanes, localBox));
 }
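The VISIBILITY value returned by GetVisibility is typically used to prune a terrain quadtree: a node reported as invisible can be skipped together with all of its children, while a partially visible node still needs its children tested. A minimal sketch, assuming the enum exposes FULLY, PARTIALLY and INVISIBLE members and a hypothetical Draw call:

 // Sketch only: the VISIBILITY member names and the Draw call are assumptions for illustration.
 void DrawIfVisible(TerrainNode node, Box3d localBox)
 {
     Frustum.VISIBILITY visibility = GetVisibility(node, localBox);
     if (visibility == Frustum.VISIBILITY.INVISIBLE)
     {
         return; // the box is entirely outside the deformed view frustum
     }
     // FULLY or PARTIALLY visible: draw it. A full traversal would also recurse
     // into child quads and skip re-testing once a parent is FULLY visible.
     Draw(node, localBox);
 }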
Example #16
        public void FrustumTransformTests()
        {
            {
                Frustum frustum = new Frustum(
                    new Plane(new Vector3(1, 0, 0), -20),
                    new Plane(new Vector3(-1, 0, 0), -20),
                    new Plane(new Vector3(0, 1, 0), -20),
                    new Plane(new Vector3(0, -1, 0), -20),
                    new Plane(new Vector3(0, 0, 1), -20),
                    new Plane(new Vector3(0, 0, -1), -20));

                Vector3Tests.TestFrustumClipLine(frustum, new Vector3(-5, 0, 0), new Vector3(5, 0, 0), true, new Vector3(-5, 0, 0), new Vector3(5, 0, 0));
                Vector3Tests.TestFrustumClipLine(frustum, new Vector3(-50, 0, 0), new Vector3(-21, 0, 0), false, null, null);
                Vector3Tests.TestFrustumClipLine(frustum, new Vector3(-50, 0, 0), new Vector3(-19, 0, 0), true, new Vector3(-20, 0, 0), new Vector3(-19, 0, 0));

                // moved right
                {
                    Frustum movedRightFrustum = Frustum.Transform(frustum, Matrix4X4.CreateTranslation(10, 0, 0));
                    Assert.IsTrue(movedRightFrustum.Planes[0] == new Plane(new Vector3(1, 0, 0), -10));
                    Assert.IsTrue(movedRightFrustum.Planes[1] == new Plane(new Vector3(-1, 0, 0), -30));
                    Assert.IsTrue(movedRightFrustum.Planes[2] == frustum.Planes[2]);
                    Assert.IsTrue(movedRightFrustum.Planes[3] == frustum.Planes[3]);
                    Assert.IsTrue(movedRightFrustum.Planes[4] == frustum.Planes[4]);
                    Assert.IsTrue(movedRightFrustum.Planes[5] == frustum.Planes[5]);

                    Vector3Tests.TestFrustumClipLine(movedRightFrustum, new Vector3(-5, 0, 0), new Vector3(5, 0, 0), true, new Vector3(-5, 0, 0), new Vector3(5, 0, 0));
                    Vector3Tests.TestFrustumClipLine(movedRightFrustum, new Vector3(-50, 0, 0), new Vector3(-11, 0, 0), false, null, null);
                    Vector3Tests.TestFrustumClipLine(movedRightFrustum, new Vector3(-50, 0, 0), new Vector3(-9, 0, 0), true, new Vector3(-10, 0, 0), new Vector3(-9, 0, 0));
                }

                // rotated right
                {
                    Frustum   movedRightFrustum = Frustum.Transform(frustum, Matrix4X4.CreateRotationY(MathHelper.DegreesToRadians(45)));
                    Matrix4X4 testMatrix        = Matrix4X4.CreateRotationY(MathHelper.DegreesToRadians(45));
                    Plane     control           = new Plane(Vector3Ex.TransformNormal(frustum.Planes[0].Normal, testMatrix), frustum.Planes[0].DistanceFromOrigin);
                    Assert.IsTrue(movedRightFrustum.Planes[0].Equals(control, .001, .01));
                    Assert.IsTrue(movedRightFrustum.Planes[1].Equals(new Plane(Vector3Ex.TransformNormal(frustum.Planes[1].Normal, testMatrix), frustum.Planes[1].DistanceFromOrigin)));
                    Assert.IsTrue(movedRightFrustum.Planes[2].Equals(frustum.Planes[2]));
                    Assert.IsTrue(movedRightFrustum.Planes[3].Equals(frustum.Planes[3]));
                    Assert.IsTrue(movedRightFrustum.Planes[4].Equals(new Plane(Vector3Ex.TransformNormal(frustum.Planes[4].Normal, testMatrix), frustum.Planes[4].DistanceFromOrigin)));
                    Assert.IsTrue(movedRightFrustum.Planes[5].Equals(new Plane(Vector3Ex.TransformNormal(frustum.Planes[5].Normal, testMatrix), frustum.Planes[5].DistanceFromOrigin)));
                }
            }

            // rotate about y 180 degrees
            {
                Matrix4X4 perspectiveMatrix  = Matrix4X4.CreatePerspectiveFieldOfView(MathHelper.Tau / 4, 1, 3, 507);
                Frustum   perspectiveFrustum = Frustum.FrustumFromProjectionMatrix(perspectiveMatrix);

                Vector3Tests.TestFrustumClipLine(perspectiveFrustum, new Vector3(-10, 0, -5), new Vector3(0, 0, -5), true, new Vector3(-5, 0, -5), new Vector3(0, 0, -5));
                Vector3Tests.TestFrustumClipLine(perspectiveFrustum, new Vector3(-50, 0, -5), new Vector3(-21, 0, -5), false, null, null);
                Vector3Tests.TestFrustumClipLine(perspectiveFrustum, new Vector3(-50, 0, -20), new Vector3(-19, 0, -20), true, new Vector3(-20, 0, -20), new Vector3(-19, 0, -20));

                var frustum = Frustum.Transform(perspectiveFrustum, Matrix4X4.CreateRotationY(MathHelper.Tau / 2));

                // left
                Assert.IsTrue(frustum.Planes[0].Normal.Equals(new Vector3(-1, 0, 1).GetNormal(), .0001));
                Assert.AreEqual(frustum.Planes[0].DistanceFromOrigin, 0, .0001);
                // right
                Assert.IsTrue(frustum.Planes[1].Normal.Equals(new Vector3(1, 0, 1).GetNormal(), .0001));
                Assert.AreEqual(frustum.Planes[1].DistanceFromOrigin, 0, .0001);
                // bottom
                Assert.IsTrue(frustum.Planes[2].Normal.Equals(new Vector3(0, 1, 1).GetNormal(), .0001));
                Assert.AreEqual(frustum.Planes[2].DistanceFromOrigin, 0, .0001);
                // top
                Assert.IsTrue(frustum.Planes[3].Normal.Equals(new Vector3(0, -1, 1).GetNormal(), .0001));
                Assert.AreEqual(frustum.Planes[3].DistanceFromOrigin, 0, .0001);
                // near
                Assert.IsTrue(frustum.Planes[4].Normal.Equals(new Vector3(0, 0, 1), .0001));
                Assert.AreEqual(frustum.Planes[4].DistanceFromOrigin, 3, .0001);
                // far
                Assert.IsTrue(frustum.Planes[5].Normal.Equals(new Vector3(0, 0, -1), .0001));
                Assert.AreEqual(frustum.Planes[5].DistanceFromOrigin, -507, .0001);
            }

            // translate 10 down z
            {
                double    zMove             = 10;
                Matrix4X4 perspectiveMatrix = Matrix4X4.CreatePerspectiveFieldOfView(MathHelper.Tau / 4, 1, 3, 507);
                Frustum   frustum           = Frustum.FrustumFromProjectionMatrix(perspectiveMatrix);
                frustum = Frustum.Transform(frustum, Matrix4X4.CreateTranslation(0, 0, -10));

                double expectedPlaneOffset = Math.Sqrt(2 * (zMove / 2) * (zMove / 2));
                // left
                Assert.IsTrue(frustum.Planes[0].Normal.Equals(new Vector3(1, 0, -1).GetNormal(), .0001));
                Assert.AreEqual(expectedPlaneOffset, frustum.Planes[0].DistanceFromOrigin, .0001);
                // right
                Assert.IsTrue(frustum.Planes[1].Normal.Equals(new Vector3(-1, 0, -1).GetNormal(), .0001));
                Assert.AreEqual(expectedPlaneOffset, frustum.Planes[1].DistanceFromOrigin, .0001);
                // bottom
                Assert.IsTrue(frustum.Planes[2].Normal.Equals(new Vector3(0, 1, -1).GetNormal(), .0001));
                Assert.AreEqual(expectedPlaneOffset, frustum.Planes[2].DistanceFromOrigin, .0001);
                // top
                Assert.IsTrue(frustum.Planes[3].Normal.Equals(new Vector3(0, -1, -1).GetNormal(), .0001));
                Assert.AreEqual(expectedPlaneOffset, frustum.Planes[3].DistanceFromOrigin, .0001);
                // near
                Assert.IsTrue(frustum.Planes[4].Normal.Equals(new Vector3(0, 0, -1), .0001));
                Assert.AreEqual(frustum.Planes[4].DistanceFromOrigin, 3 + zMove, .0001);
                // far
                Assert.IsTrue(frustum.Planes[5].Normal.Equals(new Vector3(0, 0, 1), .0001));
                Assert.AreEqual(frustum.Planes[5].DistanceFromOrigin, -507 - zMove, .0001);
            }
        }
Example #17
        protected void InitOnce()
        {
            var shaderdeffile = Manager.Files.Get(@"Shaders\DeferredRendering.hlsl", false);
            var bbuffer = new byte[shaderdeffile.Length];
            shaderdeffile.Read(bbuffer,0, bbuffer.Length);
            shaderdeffile.Dispose();
            var bytecode = ShaderBytecode.Compile(bbuffer, "fx_5_0");
            bbuffer = null;
            _effect = new Effect(D3DDevice, bytecode);
            bytecode.Dispose();

            _composeTechnique = _effect.GetTechniqueByName("Compose");
            _composePass = _composeTechnique.GetPassByIndex(0);

            var vertices = new DataStream(20 * 4, true, true);
            vertices.Write(new Vector3(-1f, -1f, -1f));
            vertices.Write(new Vector2(0f,1f));
            vertices.Write(new Vector3(-1f, 1f, -1f));
            vertices.Write(new Vector2(0f, 0f));
            vertices.Write(new Vector3(1f, -1f, -1f));
            vertices.Write(new Vector2(1f, 1f));
            vertices.Write(new Vector3(1f, 1f, -1f));
            vertices.Write(new Vector2(1f, 0f));
            vertices.Position = 0;
            _composeVertices = new Buffer(D3DDevice, vertices, 20 * 4, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
            _composeVerticesBB = new VertexBufferBinding(_composeVertices, 20, 0);
            vertices.Dispose();
            _composeLayout = new InputLayout(D3DDevice, _composePass.Description.Signature, new[] { new InputElement("POSITION", 0, Format.R32G32B32_Float, 0), new InputElement("TEXCOORD", 0, Format.R32G32_Float, 0) });
            var sampleMode = SamplerState.FromDescription(D3DDevice, new SamplerDescription()
            {
                AddressU = TextureAddressMode.Clamp,
                AddressV = TextureAddressMode.Clamp,
                AddressW = TextureAddressMode.Clamp,
                BorderColor = new Color4(0, 0, 0, 0),
                Filter = Filter.MinLinearMagMipPoint,
                ComparisonFunction = Comparison.Always,
                MipLodBias = 0f,
                MaximumAnisotropy = 8,
                MinimumLod = 0f,
                MaximumLod = float.MaxValue
            });
            _effect.GetVariableByName("composeSampler").AsSampler().SetSamplerState(0, sampleMode);
            sampleMode.Dispose();
            _effect.GetVariableByName("composeFlags").AsScalar().Set(
                Manager.Opts.Get<bool>("rndr_rawGBufferView")?0x1:0
                );

            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_rawGBufferView", delegate(string key, object value)
            {
                Output.BeginInvoke((Action)delegate
                {
                    if ((bool)value)
                        _effect.GetVariableByName("composeFlags").AsScalar().Set(_effect.GetVariableByName("composeFlags").AsScalar().GetInt() | 0x1);
                    else
                        _effect.GetVariableByName("composeFlags").AsScalar().Set(_effect.GetVariableByName("composeFlags").AsScalar().GetInt() & (int.MaxValue - 0x1));
                });
            }));

            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_nearPlane", delegate {
                Output.BeginInvoke((Action)ResetDevice);
            }));
            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_farPlane", delegate {
                Output.BeginInvoke((Action)ResetDevice);
            }));

            SceneRasterizer = RasterizerState.FromDescription(D3DDevice, new RasterizerStateDescription()
                                                                                      {
                                                                                          FillMode = (Manager.Opts.Get<bool>("rndr_wireframe") ? FillMode.Wireframe : FillMode.Solid),
                                                                                          CullMode = (Manager.Opts.Get<bool>("rndr_cull") ? CullMode.Back : CullMode.None)
                                                                                      });
            _composeRasterizer = RasterizerState.FromDescription(D3DDevice, new RasterizerStateDescription()
            {
                FillMode = FillMode.Solid,
                CullMode = CullMode.Back
            });

            var bsd = new BlendStateDescription();
            bsd.RenderTargets[0].BlendEnable = true;

            bsd.RenderTargets[0].SourceBlend = BlendOption.SourceAlpha;
            bsd.RenderTargets[0].DestinationBlend = BlendOption.InverseSourceAlpha;
            bsd.RenderTargets[0].BlendOperation = BlendOperation.Add;

            bsd.RenderTargets[0].SourceBlendAlpha = BlendOption.One;
            bsd.RenderTargets[0].DestinationBlendAlpha = BlendOption.Zero;
            bsd.RenderTargets[0].BlendOperationAlpha = BlendOperation.Add;
            bsd.RenderTargets[0].RenderTargetWriteMask = ColorWriteMaskFlags.All;

            Context.OutputMerger.BlendState = BlendState.FromDescription(D3DDevice, bsd);

            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_wireframe", delegate(string key, object value)
            {
                Output.BeginInvoke((Action)delegate {
                    var oldcullmode = CullMode.Back;
                    if (SceneRasterizer != null)
                    {
                        oldcullmode = SceneRasterizer.Description.CullMode;
                        SceneRasterizer.Dispose();
                    }
                    SceneRasterizer = RasterizerState.FromDescription(D3DDevice, new RasterizerStateDescription()
                    {
                        FillMode = (((bool)value) ? FillMode.Wireframe : FillMode.Solid),
                        CullMode = oldcullmode
                    });
                });
            }));
            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_cull", delegate(string key, object value)
            {
                Output.BeginInvoke((Action)delegate
                {
                    var oldfillmode = FillMode.Solid;
                    if (SceneRasterizer != null)
                    {
                        oldfillmode = SceneRasterizer.Description.FillMode;
                        SceneRasterizer.Dispose();
                    }
                    SceneRasterizer = RasterizerState.FromDescription(D3DDevice, new RasterizerStateDescription()
                    {
                        FillMode =  oldfillmode,
                        CullMode = (((bool)value) ? CullMode.Back : CullMode.None)
                    });
                });
            }));

            Context.OutputMerger.DepthStencilState = DepthStencilState.FromDescription(D3DDevice, new DepthStencilStateDescription()
                                                                                                      {
                                                                                                          IsDepthEnabled = true,
                                                                                                          DepthWriteMask = DepthWriteMask.All,
                                                                                                          DepthComparison = Comparison.Less,
                                                                                                          IsStencilEnabled = false,
                                                                                                      });
            _camIncorporeal = Manager.Opts.Get<bool>("rndr_incorporeal");
            NotifyHandlers.Add(Manager.Opts.RegisterChangeNotification("rndr_incorporeal", delegate(string key, object value) { _camIncorporeal = (bool)value; }));

            ViewerLocation = new Vector3(-1, 1, -1);
            ViewerLookAt = new Vector3(0, 0, 0);
            ViewerUpVector = Vector3.UnitY;
            _camLocationIncorporeal = new Vector3(-1, 1, -1);
            _camLookAtIncorporeal = new Vector3(0, 0, 0);
            _camUpVectorIncorporeal = Vector3.UnitY;
            ViewerFrustum = new Frustum();
            _fpsTimer = new HTimer();
            _fpsRingbuffer = new double[60];
            _fpsRingbufferIndex = 0;
        }
Example #18
 // Function for determining if box is in view
 public bool BoundingVolumeIsInView(BoundingBox box)
 {
     // The box is in view unless it is completely outside (disjoint from) the frustum
     return(Frustum.Contains(box) != ContainmentType.Disjoint);
 }
Example #19
 public override bool UpdateConvex()
 {
     return((_frustum = Frustum.Create(_attachedCamera)) != null);
 }
Example #20
        public override void PrepareVisibleObjects(ICamera camera, PassData batchHelper)
        {
            batchHelper.visibleObjects.FastClear();

            Frustum frus = camera.Frustum;

            Vector3 camPos = camera.Position;

            // do a BFS pass here

            queue.Enqueue(octRootNode);

            while (queue.Count > 0)
            {
                OctreeSceneNode node = queue.Dequeue();

                Vector3 dir;
                Vector3 center2;

                if (frus.IntersectsSphere(ref node.BoundingSphere.Center, node.BoundingSphere.Radius))
                {
                    for (int i = 0; i < 2; i++)
                    {
                        for (int j = 0; j < 2; j++)
                        {
                            for (int k = 0; k < 2; k++)
                            {
                                if (node[i, j, k] != null)
                                {
                                    queue.Enqueue(node[i, j, k]);
                                }
                            }
                        }
                    }
                    FastList <SceneObject> objs = node.AttchedObjects;
                    for (int i = 0; i < objs.Count; i++)
                    {
                        SceneObject obj = objs.Elements[i];
                        dir = obj.BoundingSphere.Center;
                        dir.Normalize();

                        Vector3.Multiply(ref dir, obj.BoundingSphere.Radius, out dir);
                        Vector3.Subtract(ref obj.BoundingSphere.Center, ref dir, out center2);

                        Vector3.Subtract(ref center2, ref camPos, out dir);

                        if (Vector3.Dot(ref dir, ref center2) <= 0)
                        {
                            int level = GetLevel(ref obj.BoundingSphere, ref camPos);

                            if (obj.HasSubObjects)
                            {
                                obj.PrepareVisibleObjects(camera, level);
                            }
                            AddVisibleObject(obj, level, batchHelper);
                        }
                    }
                }
            }

            for (int i = 0; i < farObjects.Count; i++)
            {
                int level = GetLevel(ref farObjects[i].BoundingSphere, ref camPos);

                AddVisibleObject(farObjects[i], level, batchHelper);
            }
        }
Example #21
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
        {
            // Store a shortcut to HDAdditionalCameraData (done here and not in the constructor, as
            // we don't create an HDCamera every frame and the user can change the HDAdditionalCameraData later, e.g. when they create a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // Handle memory allocation.
            {
                bool isColorPyramidHistoryRequired = m_frameSettings.enableSSR; // TODO: TAA as well
                bool isVolumetricHistoryRequired   = m_frameSettings.enableVolumetrics && m_frameSettings.enableReprojectionForVolumetrics;

                int numColorPyramidBuffersRequired = isColorPyramidHistoryRequired ? 2 : 1; // TODO: 1 -> 0
                int numVolumetricBuffersRequired   = isVolumetricHistoryRequired   ? 2 : 0; // History + feedback

                if ((numColorPyramidBuffersAllocated != numColorPyramidBuffersRequired) ||
                    (numVolumetricBuffersAllocated != numVolumetricBuffersRequired))
                {
                    // Reinit the system.
                    colorPyramidHistoryIsValid = false;
                    vlSys.DeinitializePerCameraData(this);

                    // The history system only supports the "nuke all" option.
                    m_HistoryRTSystem.Dispose();
                    m_HistoryRTSystem = new BufferedRTHandleSystem();

                    if (numColorPyramidBuffersRequired != 0)
                    {
                        AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain, HistoryBufferAllocatorFunction, numColorPyramidBuffersRequired);
                        colorPyramidHistoryIsValid = false;
                    }

                    vlSys.InitializePerCameraData(this, numVolumetricBuffersRequired);

                    // Mark as init.
                    numColorPyramidBuffersAllocated = numColorPyramidBuffersRequired;
                    numVolumetricBuffersAllocated   = numVolumetricBuffersRequired;
                }
            }

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            worldSpaceCameraPos = pos;

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              m_frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // Update viewport sizes.
            m_ViewportSizePrevFrame = new Vector2Int(m_ActualWidth, m_ActualHeight);
            m_ActualWidth           = camera.pixelWidth;
            m_ActualHeight          = camera.pixelHeight;

            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

            textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);

            numEyes = camera.stereoEnabled ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

            if (camera.stereoEnabled)
            {
                textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera could be rendered multiple times per frame, only updates the previous view proj & pos if needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevViewProjMatrixStereo[eyeIndex] = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevViewProjMatrixStereo[eyeIndex] = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }
                isFirstFrame = true; // So that mono vars can still update when stereo active

                screenWidth  = XRGraphics.eyeTextureWidth;
                screenHeight = XRGraphics.eyeTextureHeight;

                var xrDesc = XRGraphics.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame, only updates the previous view proj & pos if needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;

            ConfigureStereoMatrices();

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            // TODO: cache this, or make the history system spill the beans...
            Vector2Int prevColorPyramidBufferSize = Vector2Int.zero;

            if (numColorPyramidBuffersAllocated > 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                prevColorPyramidBufferSize.x = rt.width;
                prevColorPyramidBufferSize.y = rt.height;
            }

            // TODO: cache this, or make the history system spill the beans...
            Vector3Int prevVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                prevVolumetricBufferSize.x = rt.width;
                prevVolumetricBufferSize.y = rt.height;
                prevVolumetricBufferSize.z = rt.volumeDepth;
            }

            // Unfortunately sometime (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
            m_msaaSamples = msaaSamples;
            RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            Vector3Int currColorPyramidBufferSize = Vector3Int.zero;

            if (numColorPyramidBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                currColorPyramidBufferSize.x = rt.width;
                currColorPyramidBufferSize.y = rt.height;

                if ((currColorPyramidBufferSize.x != prevColorPyramidBufferSize.x) ||
                    (currColorPyramidBufferSize.y != prevColorPyramidBufferSize.y))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    colorPyramidHistoryIsValid = false;
                }
            }

            Vector3Int currVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                currVolumetricBufferSize.x = rt.width;
                currVolumetricBufferSize.y = rt.height;
                currVolumetricBufferSize.z = rt.volumeDepth;

                if ((currVolumetricBufferSize.x != prevVolumetricBufferSize.x) ||
                    (currVolumetricBufferSize.y != prevVolumetricBufferSize.y) ||
                    (currVolumetricBufferSize.z != prevVolumetricBufferSize.z))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    volumetricHistoryIsValid = false;
                }
            }

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;

            Vector2 rcpTextureSize = Vector2.one / new Vector2(maxWidth, maxHeight);

            m_ViewportScalePreviousFrame = m_ViewportSizePrevFrame * rcpTextureSize;
            m_ViewportScaleCurrentFrame  = new Vector2Int(m_ActualWidth, m_ActualHeight) * rcpTextureSize;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }

            UpdateVolumeParameters();
        }
Example #22
 /// <summary>
 /// Draw a line in the scene in 3D but scale it such that it appears as a 2D line in the view.
 /// </summary>
 /// <param name="world"></param>
 /// <param name="clippingFrustum">This is a cache of the frustum from world.
 /// Much faster to pass this way if drawing lots of lines.</param>
 /// <param name="start"></param>
 /// <param name="end"></param>
 /// <param name="color"></param>
 /// <param name="doDepthTest"></param>
 /// <param name="width"></param>
 public static void Render3DLine(this WorldView world, Frustum clippingFrustum, Vector3 start, Vector3 end, Color color, bool doDepthTest = true, double width = 1)
 {
     PrepareFor3DLineRender(doDepthTest);
     world.Render3DLineNoPrep(clippingFrustum, start, end, color, width);
 }
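The clippingFrustum parameter is documented as a cached copy of the frustum taken from world, so the intent is to fetch it once and reuse it across many line draws rather than recomputing it per call. A minimal sketch, with the frustum-retrieval call name assumed:

 // Sketch only: GetClippingFrustum is an assumed name for however the cached frustum is obtained.
 static void DrawRectangleOutline(WorldView world, Vector3 min, Vector3 max, Color color)
 {
     Frustum clippingFrustum = world.GetClippingFrustum(); // fetch once, reuse for every line
     world.Render3DLine(clippingFrustum, new Vector3(min.X, min.Y, min.Z), new Vector3(max.X, min.Y, min.Z), color);
     world.Render3DLine(clippingFrustum, new Vector3(max.X, min.Y, min.Z), new Vector3(max.X, max.Y, min.Z), color);
     world.Render3DLine(clippingFrustum, new Vector3(max.X, max.Y, min.Z), new Vector3(min.X, max.Y, min.Z), color);
     world.Render3DLine(clippingFrustum, new Vector3(min.X, max.Y, min.Z), new Vector3(min.X, min.Y, min.Z), color);
 }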
Example #23
 public bool BoundingVolumeIsInView(BoundingBox box)
 {
     return(Frustum.Contains(box) != ContainmentType.Disjoint);
 }
Example #24
        public static void Render3DLineNoPrep(this WorldView world, Frustum clippingFrustum, Vector3 start, Vector3 end, Color color, double width = 1)
        {
            if (clippingFrustum.ClipLine(ref start, ref end))
            {
                double unitsPerPixelStart = world.GetWorldUnitsPerScreenPixelAtPosition(start);
                double unitsPerPixelEnd   = world.GetWorldUnitsPerScreenPixelAtPosition(end);

                Vector3   delta           = start - end;
                var       deltaLength     = delta.Length;
                Matrix4X4 rotateTransform = Matrix4X4.CreateRotation(new Quaternion(Vector3.UnitX + new Vector3(.0001, -.00001, .00002), -delta / deltaLength));
                Matrix4X4 scaleTransform  = Matrix4X4.CreateScale(deltaLength, 1, 1);
                Vector3   lineCenter      = (start + end) / 2;
                Matrix4X4 lineTransform   = scaleTransform * rotateTransform * Matrix4X4.CreateTranslation(lineCenter);

                var startScale = unitsPerPixelStart * width;
                var endScale   = unitsPerPixelEnd * width;
                for (int i = 0; i < unscaledLineMesh.Vertices.Count; i++)
                {
                    var vertexPosition = unscaledLineMesh.Vertices[i];
                    if (vertexPosition.X < 0)
                    {
                        scaledLineMesh.Vertices[i] = new Vector3Float(vertexPosition.X, vertexPosition.Y * startScale, vertexPosition.Z * startScale);
                    }
                    else
                    {
                        scaledLineMesh.Vertices[i] = new Vector3Float(vertexPosition.X, vertexPosition.Y * endScale, vertexPosition.Z * endScale);
                    }
                }

                if (true)
                {
                    GL.Color4(color.Red0To255, color.Green0To255, color.Blue0To255, color.Alpha0To255);

                    if (color.Alpha0To1 < 1)
                    {
                        GL.Enable(EnableCap.Blend);
                    }
                    else
                    {
                        //GL.Disable(EnableCap.Blend);
                    }

                    GL.MatrixMode(MatrixMode.Modelview);
                    GL.PushMatrix();
                    GL.MultMatrix(lineTransform.GetAsFloatArray());

                    GL.Begin(BeginMode.Triangles);
                    for (int faceIndex = 0; faceIndex < scaledLineMesh.Faces.Count; faceIndex++)
                    {
                        var face     = scaledLineMesh.Faces[faceIndex];
                        var vertices = scaledLineMesh.Vertices;
                        var position = vertices[face.v0];
                        GL.Vertex3(position.X, position.Y, position.Z);
                        position = vertices[face.v1];
                        GL.Vertex3(position.X, position.Y, position.Z);
                        position = vertices[face.v2];
                        GL.Vertex3(position.X, position.Y, position.Z);
                    }
                    GL.End();
                    GL.PopMatrix();
                }
                else
                {
                    scaledLineMesh.MarkAsChanged();

                    GLHelper.Render(scaledLineMesh, color, lineTransform, RenderTypes.Shaded);
                }
            }
        }
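The per-endpoint scaling above is what keeps the line a constant pixel width at any distance: GetWorldUnitsPerScreenPixelAtPosition converts a desired screen size into world units at a given depth. The same conversion works for any widget that should keep a fixed on-screen size; a minimal sketch using only that call, with the method name chosen here purely for illustration:

        // Sketch: how large (in world units) something must be at 'position'
        // to appear roughly 'pixels' wide on screen.
        public static double WorldSizeForPixels(WorldView world, Vector3 position, double pixels = 12)
        {
            return world.GetWorldUnitsPerScreenPixelAtPosition(position) * pixels;
        }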
Exemple #25
0
        public bool Render()
        {
            // Clear the buffer to begin the scene.
            D3D.BeginScene(0f, 0f, 0f, 1f);

            // Generate the view matrix based on the camera position.
            Camera.Render();

            // Get the world, view, and projection matrices from camera and d3d objects.
            var viewMatrix       = Camera.ViewMatrix;
            var worldMatrix      = D3D.WorldMatrix;
            var projectionMatrix = D3D.ProjectionMatrix;
            var orthoMatrix      = D3D.OrthoMatrix;

            // Construct the frustum.
            Frustum.ConstructFrustum(SystemConfiguration.ScreenDepth, projectionMatrix, viewMatrix);

            // Initialize the count of the models that have been rendered.
            var renderCount = 0;

            Vector3 position;
            Vector4 color;

            // Go through all models and render them only if they can be seen by the camera view.
            for (int index = 0; index < ModelList.ModelCount; index++)
            {
                // Get the position and color of the sphere model at this index.
                ModelList.GetData(index, out position, out color);

                // Set the radius of the sphere to 1.0 since this is already known.
                var radius = 1.0f;

                // Check if the sphere model is in the view frustum.
                var renderModel = Frustum.CheckSphere(position, radius);

                // If it can be seen, render it; if not, skip this model and check the next sphere.
                if (renderModel)
                {
                    // Move the model to the location it should be rendered at.
                    worldMatrix = Matrix.Translation(position);

                    // Put the model vertex and index buffer on the graphics pipeline to prepare them for drawing.
                    Model.Render(D3D.DeviceContext);

                    // Render the model using the light map shader.
                    if (!LightMapShader.Render(D3D.DeviceContext, Model.IndexCount, worldMatrix, viewMatrix, projectionMatrix, Model.TextureCollection.Select(item => item.TextureResource).ToArray()))
                    {
                        return(false);
                    }

                    // Reset to the original world matrix.
                    worldMatrix = D3D.WorldMatrix;

                    // Since this model was rendered then increase the count for this frame.
                    renderCount++;
                }
            }

            // Set the number of models that were actually rendered this frame.
            if (!Text.SetRenderCount(renderCount, D3D.DeviceContext))
            {
                return(false);
            }

            // Turn off the Z buffer to begin all 2D rendering.
            D3D.TurnZBufferOff();

            // Turn on the alpha blending before rendering the text.
            D3D.TurnOnAlphaBlending();

            // Render the text string.
            if (!Text.Render(D3D.DeviceContext, worldMatrix, orthoMatrix))
            {
                return(false);
            }

            // Turn off alpha blending now that the text has been rendered.
            D3D.TurnOffAlphaBlending();

            // Turn the Z buffer back on now that all 2D rendering has completed.
            D3D.TurnZBufferOn();

            // Present the rendered scene to the screen.
            D3D.EndScene();

            return(true);
        }
Exemple #26
0
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
        {
            // Store a shortcut to HDAdditionalCameraData (done here rather than in the constructor, since we don't
            // create an HDCamera every frame and the user can change HDAdditionalCameraData later, e.g. when creating a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // Handle post-process AA
            //  - If post-processing is disabled altogether, no AA
            //  - In scene view, only enable TAA if animated materials are enabled
            //  - Else just use the currently set AA mode on the camera
            {
                if (!m_frameSettings.IsEnabled(FrameSettingsField.Postprocess) || !CoreUtils.ArePostProcessesEnabled(camera))
                {
                    antialiasing = AntialiasingMode.None;
                }
#if UNITY_EDITOR
                else if (camera.cameraType == CameraType.SceneView)
                {
                    var mode = HDRenderPipelinePreferences.sceneViewAntialiasing;

                    if (mode == AntialiasingMode.TemporalAntialiasing && !CoreUtils.AreAnimatedMaterialsEnabled(camera))
                    {
                        antialiasing = AntialiasingMode.None;
                    }
                    else
                    {
                        antialiasing = mode;
                    }
                }
#endif
                else if (m_AdditionalCameraData != null)
                {
                    antialiasing = m_AdditionalCameraData.antialiasing;
                    if (antialiasing == AntialiasingMode.SubpixelMorphologicalAntiAliasing)
                    {
                        SMAAQuality = m_AdditionalCameraData.SMAAQuality;
                    }
                }
                else
                {
                    antialiasing = AntialiasingMode.None;
                }
            }

            // Handle memory allocation.
            {
                bool isColorPyramidHistoryRequired = m_frameSettings.IsEnabled(FrameSettingsField.SSR); // TODO: TAA as well
                bool isVolumetricHistoryRequired   = m_frameSettings.IsEnabled(FrameSettingsField.Volumetrics) && m_frameSettings.IsEnabled(FrameSettingsField.ReprojectionForVolumetrics);

                int numColorPyramidBuffersRequired = isColorPyramidHistoryRequired ? 2 : 1; // TODO: 1 -> 0
                int numVolumetricBuffersRequired   = isVolumetricHistoryRequired   ? 2 : 0; // History + feedback

                if ((numColorPyramidBuffersAllocated != numColorPyramidBuffersRequired) ||
                    (numVolumetricBuffersAllocated != numVolumetricBuffersRequired))
                {
                    // Reinit the system.
                    colorPyramidHistoryIsValid = false;
                    vlSys.DeinitializePerCameraData(this);

                    // The history system only supports the "nuke all" option.
                    m_HistoryRTSystem.Dispose();
                    m_HistoryRTSystem = new BufferedRTHandleSystem();

                    if (numColorPyramidBuffersRequired != 0)
                    {
                        AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain, HistoryBufferAllocatorFunction, numColorPyramidBuffersRequired);
                        colorPyramidHistoryIsValid = false;
                    }

                    vlSys.InitializePerCameraData(this, numVolumetricBuffersRequired);

                    // Mark as init.
                    numColorPyramidBuffersAllocated = numColorPyramidBuffersRequired;
                    numVolumetricBuffersAllocated   = numVolumetricBuffersRequired;
                }
            }

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = antialiasing == AntialiasingMode.TemporalAntialiasing;

            if (!taaEnabled)
            {
                taaFrameIndex = 0;
                taaJitter     = Vector4.zero;
            }

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? GetJitteredProjectionMatrix(nonJitteredCameraProj)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // Update viewport sizes.
            m_ViewportSizePrevFrame = new Vector2Int(m_ActualWidth, m_ActualHeight);
            m_ActualWidth           = Math.Max(camera.pixelWidth, 1);
            m_ActualHeight          = Math.Max(camera.pixelHeight, 1);

            Vector2Int nonScaledSize = new Vector2Int(m_ActualWidth, m_ActualHeight);
            if (isMainGameView)
            {
                Vector2Int scaledSize = HDDynamicResolutionHandler.instance.GetRTHandleScale(new Vector2Int(camera.pixelWidth, camera.pixelHeight));
                nonScaledSize  = HDDynamicResolutionHandler.instance.cachedOriginalSize;
                m_ActualWidth  = scaledSize.x;
                m_ActualHeight = scaledSize.y;
            }

            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;
            textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);

            numEyes = camera.stereoEnabled ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

            if (camera.stereoEnabled)
            {
                if (XRGraphics.stereoRenderingMode == XRGraphics.StereoRenderingMode.SinglePass)
                {
                    textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
                }

                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = gpuCurrViewStereo.inverse.GetColumn(3);
                            prevViewProjMatrixStereo[eyeIndex]      = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = worldSpaceCameraPosStereo[eyeIndex];
                            prevViewProjMatrixStereo[eyeIndex]      = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }

                // XRTODO: fix this
                isFirstFrame = true; // So that mono vars can still update when stereo active

                // XRTODO: remove once SPI is working
                if (XRGraphics.stereoRenderingMode == XRGraphics.StereoRenderingMode.SinglePass)
                {
                    Debug.Assert(HDDynamicResolutionHandler.instance.SoftwareDynamicResIsEnabled() == false);

                    var xrDesc = XRGraphics.eyeTextureDesc;
                    nonScaledSize.x = screenWidth = m_ActualWidth = xrDesc.width;
                    nonScaledSize.y = screenHeight = m_ActualHeight = xrDesc.height;
                }
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
            // Note: if your first rendered view during the frame is not the Game view, everything breaks.
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevWorldSpaceCameraPos = camera.transform.position;
                    prevViewProjMatrix      = gpuVP;
                }
                else
                {
                    prevWorldSpaceCameraPos         = worldSpaceCameraPos;
                    prevViewProjMatrix              = nonJitteredViewProjMatrix;
                    prevViewProjMatrixNoCameraTrans = prevViewProjMatrix;
                }

                isFirstFrame = false;
            }

            // In stereo, this corresponds to the center eye position
            worldSpaceCameraPos = camera.transform.position;

            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;

            ConfigureStereoMatrices();

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                prevWorldSpaceCameraPos = worldSpaceCameraPos - prevWorldSpaceCameraPos;
                // This fixes an issue with cameraDisplacement stacking up in prevViewProjMatrix when the same camera renders multiple times per logical frame,
                // causing glitchy motion blur when the editor is paused.
                if (m_LastFrameActive != Time.frameCount)
                {
                    Matrix4x4 cameraDisplacement = Matrix4x4.Translate(prevWorldSpaceCameraPos);
                    prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
                }
            }
            else
            {
                Matrix4x4 noTransViewMatrix = camera.worldToCameraMatrix;
                noTransViewMatrix.SetColumn(3, new Vector4(0, 0, 0, 1));
                prevViewProjMatrixNoCameraTrans = nonJitteredProjMatrix * noTransViewMatrix;
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;
            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            // TODO: cache this, or make the history system spill the beans...
            Vector2Int prevColorPyramidBufferSize = Vector2Int.zero;

            if (numColorPyramidBuffersAllocated > 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                prevColorPyramidBufferSize.x = rt.width;
                prevColorPyramidBufferSize.y = rt.height;
            }

            // TODO: cache this, or make the history system spill the beans...
            Vector3Int prevVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                prevVolumetricBufferSize.x = rt.width;
                prevVolumetricBufferSize.y = rt.height;
                prevVolumetricBufferSize.z = rt.volumeDepth;
            }

            m_msaaSamples = msaaSamples;
            // Here we use the non scaled resolution for the RTHandleSystem ref size because we assume that at some point we will need full resolution anyway.
            // This is also useful because we have some RT after final up-rez that will need the full size.
            RTHandles.SetReferenceSize(nonScaledSize.x, nonScaledSize.y, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(nonScaledSize.x, nonScaledSize.y, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            Vector3Int currColorPyramidBufferSize = Vector3Int.zero;

            if (numColorPyramidBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                currColorPyramidBufferSize.x = rt.width;
                currColorPyramidBufferSize.y = rt.height;

                if ((currColorPyramidBufferSize.x != prevColorPyramidBufferSize.x) ||
                    (currColorPyramidBufferSize.y != prevColorPyramidBufferSize.y))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    colorPyramidHistoryIsValid = false;
                }
            }

            Vector3Int currVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                currVolumetricBufferSize.x = rt.width;
                currVolumetricBufferSize.y = rt.height;
                currVolumetricBufferSize.z = rt.volumeDepth;

                if ((currVolumetricBufferSize.x != prevVolumetricBufferSize.x) ||
                    (currVolumetricBufferSize.y != prevVolumetricBufferSize.y) ||
                    (currVolumetricBufferSize.z != prevVolumetricBufferSize.z))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    volumetricHistoryIsValid = false;
                }
            }

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;

            Vector2 rcpTextureSize = Vector2.one / new Vector2(maxWidth, maxHeight);

            m_ViewportScalePreviousFrame = m_ViewportSizePrevFrame * rcpTextureSize;
            m_ViewportScaleCurrentFrame  = new Vector2Int(m_ActualWidth, m_ActualHeight) * rcpTextureSize;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            finalViewport = new Rect(camera.pixelRect.x, camera.pixelRect.y, nonScaledSize.x, nonScaledSize.y);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }

            UpdateVolumeParameters();

            m_RecorderCaptureActions = CameraCaptureBridge.GetCaptureActions(camera);
        }
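The projection-matrix analysis in the method above (the p[2][3] comment, depth_0_1, reverseZ, and the humus.name link) exists so that zBufferParams can linearize raw depth-buffer samples in shaders. As a hedged illustration of what those four components encode, the conventional reconstruction of eye-space depth from a [0..1] depth sample looks like this (it mirrors Unity's LinearEyeDepth helper; the method below is only a sketch):

            // Sketch: recover view-space (eye) depth from a raw depth-buffer value,
            // given zBufferParams packed exactly as in the two branches above.
            static float LinearEyeDepth(float rawDepth, Vector4 zBufferParams)
            {
                // For both the reverse-Z and standard packings, eyeDepth = 1 / (z * d + w):
                // d = 1 maps to the near plane under reverse-Z, d = 0 under the standard packing.
                return 1.0f / (zBufferParams.z * rawDepth + zBufferParams.w);
            }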
Exemple #27
0
        private void Update(Scene scene, Camera camera)
        {
            // set GL state for depth map
            GL.ClearColor(1, 1, 1, 1);
            GL.Disable(EnableCap.Blend);
            renderer.DepthTest = true;

            // update scene
            if (scene.AutoUpdate)
            {
                scene.UpdateMatrixWorld();
            }

            // update camera matrices and frustum
            camera.matrixWorldInverse = Matrix4.GetInverse(camera.matrixWorld);
            projectionScreenMatrix.MultiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse);
            frustum = Frustum.FromMatrix(projectionScreenMatrix);

            // render depth map
            renderer.SetRenderTarget(renderTarget);
            renderer.Clear();

            // set object matrices & frustum culling
            renderList.Clear();
            ProjectObject(scene, scene, camera);

            // render regular objects
            foreach (var glObject in renderList)
            {
                var o      = glObject.Object;
                var buffer = glObject.Buffer;

                // TODO: create proper depth material for particles
                if (o is PointCloud /*&& !o.customDepthMaterial*/)
                {
                    continue;
                }

                // var useMorphing = o.geometry.MorphTargets != null && o.geometry.MorphTargets.Count > 0; /* TODO IAN, need this for skinning? && object.Material.morphTargets */
                //     var useSkinning = o is SkinnedMesh; /* TODO IAN, need this for skinning? && object.Material.skinning */

                Material material;
                if (o.customDepthMaterial != null)
                {
                    material = o.customDepthMaterial;
                }
                //else if ( useSkinning ) material = useMorphing ? depthMaterialMorphSkin : depthMaterialSkin;
                //else if ( useMorphing ) material = depthMaterialMorph;
                else
                {
                    material = depthMaterial;
                }

                renderer.RenderBuffer(camera, scene.lights, null, material, buffer, o);
            }

            // restore GL state
            var clearColor = renderer.ClearColor;

            GL.ClearColor(clearColor.R, clearColor.G, clearColor.B, 1);
            GL.Enable(EnableCap.Blend);
        }
Exemple #28
0
        public bool Render(DDX11 direct3D, DShaderManager shaderManager, DTextureManager textureManager)
        {
            // Generate the view matrix based on the camera's position.
            Camera.Render();

            // Get the world, view, and projection matrices from the camera and d3d objects.
            Matrix worldMatrix      = direct3D.WorldMatrix;
            Matrix viewCameraMatrix = Camera.ViewMatrix;
            Matrix projectionMatrix = direct3D.ProjectionMatrix;
            Matrix baseViewMatrix   = Camera.BaseViewMatrix;
            Matrix orthoMatrix      = direct3D.OrthoMatrix;

            // Construct the frustum.
            Frustum.ConstructFrustum(projectionMatrix, viewCameraMatrix);

            // Clear the buffers to begin the scene.
            direct3D.BeginScene(0.0f, 0.0f, 0.0f, 1.0f);

            // Turn off back face culling and turn off the Z buffer.
            direct3D.TurnOffCulling();
            direct3D.TurnZBufferOff();

            // Translate the sky dome to be centered around the camera position.
            Matrix.Translation(Camera.GetPosition().X, Camera.GetPosition().Y, Camera.GetPosition().Z, out worldMatrix);

            // Render the sky dome using the sky dome shader.
            SkyDomeModel.Render(direct3D.DeviceContext);
            if (!shaderManager.RenderSkyDomeShader(direct3D.DeviceContext, SkyDomeModel.IndexCount, worldMatrix, viewCameraMatrix, projectionMatrix, SkyDomeModel.m_apexColor, SkyDomeModel.m_centerColor))
            {
                return(false);
            }

            // Reset the world matrix.
            worldMatrix = direct3D.WorldMatrix;

            // Turn the Z buffer and back face culling back on.
            direct3D.TurnZBufferOn();
            direct3D.TurnOnCulling();

            // Turn on wire frame rendering of the terrain if needed.
            if (WireFrame)
            {
                direct3D.EnableWireFrame();
            }

            // Render the terrain cells (and cell lines if needed).
            for (int i = 0; i < Terrain.m_CellCount; i++)
            {
                // Render each terrain cell only if it is visible.
                if (Terrain.RenderCell(direct3D.DeviceContext, i, Frustum))
                {
                    // Render the cell buffers using the terrain shader.
                    if (!shaderManager.RenderTerrainShader(direct3D.DeviceContext, Terrain.GetCellIndexCount(i), worldMatrix, viewCameraMatrix, projectionMatrix, textureManager.TextureArray[0].TextureResource, textureManager.TextureArray[1].TextureResource, textureManager.TextureArray[2].TextureResource, Light.Direction, Light.DiffuseColour))
                    {
                        return(false);
                    }

                    // If needed, render the bounding box around this terrain cell using the color shader.
                    if (CellLines)
                    {
                        Terrain.RenderCellLines(direct3D.DeviceContext, i);
                        if (!shaderManager.RenderColorShader(direct3D.DeviceContext, Terrain.GetCellLinesIndexCount(i), worldMatrix, viewCameraMatrix, projectionMatrix))
                        {
                            return(false);
                        }
                    }
                }
            }

            // Turn off wire frame rendering of the terrain if it was on.
            if (WireFrame)
            {
                direct3D.DisableWireFrame();
            }

            // Update the render counts in the UI.
            if (!UserInterface.UpdateRenderCountStrings(Terrain.m_renderCount, Terrain.m_cellsDrawn, Terrain.m_cellsCulled, direct3D.DeviceContext))
            {
                return(false);
            }

            // Render the user interface.
            if (DisplayUI)
            {
                if (!UserInterface.Render(direct3D, shaderManager, worldMatrix, baseViewMatrix, orthoMatrix))
                {
                    return(false);
                }
            }

            // Present the rendered scene to the screen.
            direct3D.EndScene();

            return(true);
        }
Exemple #29
0
 public void Draw(float frameTime, Frustum frustum)
 {
     throw new NotImplementedException();
 }
Exemple #30
0
        internal void UpdateViewConstants(bool jitterProjectionMatrix)
        {
            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = jitterProjectionMatrix
                ? GetJitteredProjectionMatrix(nonJitteredCameraProj)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            if (camera.stereoEnabled)
            {
                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = gpuCurrViewStereo.inverse.GetColumn(3);
                            prevViewProjMatrixStereo[eyeIndex]      = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevWorldSpaceCameraPosStereo[eyeIndex] = worldSpaceCameraPosStereo[eyeIndex];
                            prevViewProjMatrixStereo[eyeIndex]      = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }

                // XRTODO: fix this
                isFirstFrame = true; // So that mono vars can still update when stereo active
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
            // Note: if your first rendered view during the frame is not the Game view, everything breaks.
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevWorldSpaceCameraPos = camera.transform.position;
                    prevViewProjMatrix      = gpuVP;
                }
                else
                {
                    prevWorldSpaceCameraPos         = worldSpaceCameraPos;
                    prevViewProjMatrix              = nonJitteredViewProjMatrix;
                    prevViewProjMatrixNoCameraTrans = prevViewProjMatrix;
                }

                isFirstFrame = false;
            }

            // In stereo, this corresponds to the center eye position
            worldSpaceCameraPos = camera.transform.position;

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;

            ConfigureStereoMatrices();

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                prevWorldSpaceCameraPos = worldSpaceCameraPos - prevWorldSpaceCameraPos;
                // This fixes an issue with cameraDisplacement stacking up in prevViewProjMatrix when the same camera renders multiple times per logical frame,
                // causing glitchy motion blur when the editor is paused.
                if (m_LastFrameActive != Time.frameCount)
                {
                    Matrix4x4 cameraDisplacement = Matrix4x4.Translate(prevWorldSpaceCameraPos);
                    prevViewProjMatrix *= cameraDisplacement; // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
                }
            }
            else
            {
                Matrix4x4 noTransViewMatrix = camera.worldToCameraMatrix;
                noTransViewMatrix.SetColumn(3, new Vector4(0, 0, 0, 1));
                prevViewProjMatrixNoCameraTrans = nonJitteredProjMatrix * noTransViewMatrix;
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }
        }
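Packing each plane as a Vector4 (xyz = normal, w = signed distance) makes the six planes trivial to upload to a constant buffer and cheap to test against. A minimal sketch of the classic point-in-frustum test, assuming the convention used above that plane normals point inward, so points inside the frustum have a non-negative signed distance to every plane:

            // Sketch: true if 'p' is inside (or on) all six packed frustum planes.
            static bool PointInsideFrustum(Vector4[] planeEquations, Vector3 p)
            {
                for (int i = 0; i < 6; i++)
                {
                    Vector4 plane = planeEquations[i];
                    float signedDistance = plane.x * p.x + plane.y * p.y + plane.z * p.z + plane.w;

                    if (signedDistance < 0)
                    {
                        return false; // outside this plane, therefore outside the frustum
                    }
                }

                return true;
            }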
Exemple #31
0
        public void FrustumIntersetAABBTests()
        {
            {
                Frustum frustum = new Frustum(
                    new Plane(new Vector3(1, 0, 0), 20),
                    new Plane(new Vector3(-1, 0, 0), 20),
                    new Plane(new Vector3(0, 1, 0), 20),
                    new Plane(new Vector3(0, -1, 0), 20),
                    new Plane(new Vector3(0, 0, 1), 20),
                    new Plane(new Vector3(0, 0, -1), 20));

                // outside to left
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-30, -10, -10), new Vector3(-25, 10, 10));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Outside);
                }

                // intersect
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-25, 0, -10), new Vector3(-15, 10, 10));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Intersect);
                }

                // not intersect
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-25, 0, 30), new Vector3(-15, 10, 35));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Outside);
                }

                // inside
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-5, -5, -5), new Vector3(5, 5, 5));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Inside);
                }
            }

            {
                Frustum frustum = new Frustum(
                    new Plane(new Vector3(-1, -1, 0), 0),
                    new Plane(new Vector3(1, -1, 0), 0),
                    new Plane(new Vector3(0, -1, -1), 0),
                    new Plane(new Vector3(0, -1, 1), 0),
                    new Plane(new Vector3(0, -1, 0), 0),
                    new Plane(new Vector3(0, 1, 0), 10000));

                // outside to left
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-110, 0, -10), new Vector3(-100, 10, 10));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Outside);
                }

                // intersect with origin (front)
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-10, -10, -10), new Vector3(10, 10, 10));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Intersect);
                }

                // inside
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-5, 100, -5), new Vector3(5, 110, 5));
                    FrustumIntersection    intersection = frustum.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Inside);
                }
            }

            {
                // looking down -z
                Frustum frustum5PlaneNegZ = new Frustum(
                    new Vector3(-1, 0, 1),
                    new Vector3(-1, 0, 1),
                    new Vector3(0, 1, 1),
                    new Vector3(0, -1, 1),
                    new Vector3(0, 0, -1), 10000);

                // outside to left
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-110, 0, -10), new Vector3(-100, 10, 10));
                    FrustumIntersection    intersection = frustum5PlaneNegZ.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Outside);
                }

                // intersect with origin (front)
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-10, -10, -10), new Vector3(10, 10, 10));
                    FrustumIntersection    intersection = frustum5PlaneNegZ.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Intersect);
                }

                // inside
                {
                    AxisAlignedBoundingBox aabb         = new AxisAlignedBoundingBox(new Vector3(-5, -5, -110), new Vector3(5, 5, -100));
                    FrustumIntersection    intersection = frustum5PlaneNegZ.GetIntersect(aabb);
                    Assert.IsTrue(intersection == FrustumIntersection.Inside);
                }
            }
        }
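The tests above exercise all three classifications (Outside, Intersect, Inside). The standard way a frustum-vs-AABB classifier produces them is the positive/negative-vertex test: for each plane, check the box corner farthest along the plane normal and the corner farthest against it. A generic, hedged sketch of that idea follows; it deliberately uses plain tuples instead of the library's Plane type and is not the library's actual implementation. Planes are assumed inward-facing, so normal · p + distance >= 0 means "inside" for that plane, matching the plane definitions in the tests above.

        static FrustumIntersection ClassifyAabb((Vector3 Normal, double Distance)[] planes, Vector3 min, Vector3 max)
        {
            var result = FrustumIntersection.Inside;

            foreach (var (normal, distance) in planes)
            {
                // Positive vertex: the box corner farthest along the plane normal...
                double px = normal.X >= 0 ? max.X : min.X;
                double py = normal.Y >= 0 ? max.Y : min.Y;
                double pz = normal.Z >= 0 ? max.Z : min.Z;
                // ...and the negative vertex: the opposite corner.
                double nx = normal.X >= 0 ? min.X : max.X;
                double ny = normal.Y >= 0 ? min.Y : max.Y;
                double nz = normal.Z >= 0 ? min.Z : max.Z;

                if (normal.X * px + normal.Y * py + normal.Z * pz + distance < 0)
                {
                    return FrustumIntersection.Outside; // even the closest corner is behind this plane
                }

                if (normal.X * nx + normal.Y * ny + normal.Z * nz + distance < 0)
                {
                    result = FrustumIntersection.Intersect; // the box straddles this plane
                }
            }

            return result;
        }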
Exemple #32
0
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
        {
            // Store a shortcut to HDAdditionalCameraData (done here rather than in the constructor, since we don't
            // create an HDCamera every frame and the user can change HDAdditionalCameraData later, e.g. when creating a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            worldSpaceCameraPos = pos;

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              m_frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            m_ActualWidth  = camera.pixelWidth;
            m_ActualHeight = camera.pixelHeight;
            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

            textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);

            numEyes = m_frameSettings.enableStereo ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

            if (m_frameSettings.enableStereo)
            {
                textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevViewProjMatrixStereo[eyeIndex] = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevViewProjMatrixStereo[eyeIndex] = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }
                isFirstFrame = true; // So that mono vars can still update when stereo active

                screenWidth  = XRGraphicsConfig.eyeTextureWidth;
                screenHeight = XRGraphicsConfig.eyeTextureHeight;

                var xrDesc = XRGraphicsConfig.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;

                ConfigureStereoMatrices();
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;

            if (!m_frameSettings.enableStereo)
            {
                // TODO VR: Current solution for compute shaders grabs matrices from
                // stereo matrices even when not rendering stereo in order to reduce shader variants.
                // After native fix for compute shader keywords is completed, qualify this with stereoEnabled.
                viewMatrixStereo[0] = viewMatrix;
                projMatrixStereo[0] = projMatrix;
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            Vector2 lastTextureSize = new Vector2(RTHandles.maxWidth, RTHandles.maxHeight);

            // Unfortunately sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
            m_msaaSamples = msaaSamples;
            RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;

            Vector2 lastByCurrentTextureSizeRatio = lastTextureSize / new Vector2(maxWidth, maxHeight);

            // Double-buffer. Note: this should be (LastViewportSize / CurrentTextureSize).
            m_ViewportScalePreviousFrame  = m_ViewportScaleCurrentFrame * lastByCurrentTextureSizeRatio;
            m_ViewportScaleCurrentFrame.x = (float)m_ActualWidth / maxWidth;
            m_ViewportScaleCurrentFrame.y = (float)m_ActualHeight / maxHeight;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }

            UpdateVolumeParameters();
        }
Exemple #33
0
		ConvexPolyhedron GetConvexPolyhedronFromFrustum( ref Frustum frustum )
		{
			Vec3[] points = null;
			frustum.ToPoints( ref points );

			ConvexPolyhedron.Face[] faces = new ConvexPolyhedron.Face[ 12 ];

			faces[ 0 ] = new ConvexPolyhedron.Face( 5, 4, 7 );
			faces[ 1 ] = new ConvexPolyhedron.Face( 7, 6, 5 );
			faces[ 2 ] = new ConvexPolyhedron.Face( 0, 1, 2 );
			faces[ 3 ] = new ConvexPolyhedron.Face( 2, 3, 0 );

			faces[ 4 ] = new ConvexPolyhedron.Face( 4, 0, 3 );
			faces[ 5 ] = new ConvexPolyhedron.Face( 3, 7, 4 );
			faces[ 6 ] = new ConvexPolyhedron.Face( 1, 5, 6 );
			faces[ 7 ] = new ConvexPolyhedron.Face( 6, 2, 1 );

			faces[ 8 ] = new ConvexPolyhedron.Face( 6, 7, 3 );
			faces[ 9 ] = new ConvexPolyhedron.Face( 3, 2, 6 );
			faces[ 10 ] = new ConvexPolyhedron.Face( 4, 5, 1 );
			faces[ 11 ] = new ConvexPolyhedron.Face( 1, 0, 4 );

			return new ConvexPolyhedron( points, faces, .0001f );
		}
Exemple #34
0
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys)
        {
            m_frameSettings = currentFrameSettings;

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              CoreUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              m_frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            worldSpaceCameraPos = pos;

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame, so only update the previous view proj & pos if needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;
            detViewMatrix         = viewMatrix.determinant;

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            frustum = Frustum.Create(viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            m_ActualWidth  = camera.pixelWidth;
            m_ActualHeight = camera.pixelHeight;
            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

#if !UNITY_SWITCH
            if (m_frameSettings.enableStereo)
            {
                screenWidth  = XRSettings.eyeTextureWidth;
                screenHeight = XRSettings.eyeTextureHeight;

                var xrDesc = XRSettings.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;

                ConfigureStereoMatrices();
            }
#endif

            // Unfortunately sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
            m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
            RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;
            m_ViewportScalePreviousFrame  = m_ViewportScaleCurrentFrame; // Double-buffer
            m_ViewportScaleCurrentFrame.x = (float)m_ActualWidth / maxWidth;
            m_ViewportScaleCurrentFrame.y = (float)m_ActualHeight / maxHeight;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }
        }
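
The zBufferParams packing chosen in the Update method above follows the usual linearize-depth layout (see the Humus link in the code): eye-space depth can be recovered from a raw depth-buffer value as 1 / (raw * zBufferParams.z + zBufferParams.w), and the same expression works for both the reversed-Z and the conventional branch. A minimal sketch of such a helper; the name LinearEyeDepth is an assumption and does not appear in the examples on this page:

 static float LinearEyeDepth(float rawDepth, Vector4 zBufferParams)
 {
     // rawDepth is the non-linear value read back from the depth buffer.
     // With the packing above, 1 / (rawDepth * z + w) yields the near plane
     // distance at the near end and the far plane distance at the far end,
     // for reversed-Z as well as conventional projections.
     return 1.0f / (rawDepth * zBufferParams.z + zBufferParams.w);
 }
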
 public FrustumClipTerrainModule(PlanarTerrain terrain)
     : base(terrain)
 {
     Frustum = new Frustum(Matrix4d.Identity);
 }
Exemple #36
0
        public void Update(PostProcessLayer postProcessLayer, FrameSettings frameSettings)
        {
            // If TAA is enabled, projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              CoreUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The projection matrix used in shaders is massaged a bit to work across all platforms
            // (different Z value ranges, etc.).
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Note: the renderIntoTexture argument was changed from 'false' to 'true' here
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera can be rendered multiple times per frame; only update the previous view-projection matrix and position when needed.
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;
            viewParam             = new Vector4(viewMatrix.determinant, 0.0f, 0.0f, 0.0f);

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            frustum = Frustum.Create(viewProjMatrix, true, true);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            m_ActualWidth  = camera.pixelWidth;
            m_ActualHeight = camera.pixelHeight;
            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

            if (frameSettings.enableStereo)
            {
                screenWidth  = XRSettings.eyeTextureWidth;
                screenHeight = XRSettings.eyeTextureHeight;

                var xrDesc = XRSettings.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;

                ConfigureStereoMatrices();
            }

            // Unfortunately, sometimes (e.g. in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
            m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
            RTHandle.SetReferenceSize(m_ActualWidth, m_ActualHeight, frameSettings.enableMSAA, m_msaaSamples);

            int maxWidth  = RTHandle.maxWidth;
            int maxHeight = RTHandle.maxHeight;

            m_CameraScaleBias.x = (float)m_ActualWidth / maxWidth;
            m_CameraScaleBias.y = (float)m_ActualHeight / maxHeight;

            screenSize = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
        }
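
The six frustumPlaneEquations filled in above pack each plane as (normal.x, normal.y, normal.z, distance), so a point lies on the inner side of a plane when dot(normal, p) + distance >= 0. A minimal conservative sphere-versus-frustum test against that packing, as a sketch; the helper name is an assumption and does not appear in the examples:

 static bool SphereOverlapsFrustum(Vector4[] planes, Vector3 center, float radius)
 {
     for (int i = 0; i < 6; i++)
     {
         // Signed distance from the sphere center to plane i.
         float d = planes[i].x * center.x +
                   planes[i].y * center.y +
                   planes[i].z * center.z +
                   planes[i].w;

         if (d < -radius)
             return false; // Entirely behind one plane -> culled.
     }

     return true; // Intersects or is fully inside the frustum.
 }
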
Exemple #37
0
        public void RenderLevel(Vector3 CameraPosition, Frustum GameFrustrum)
        {
            //Enable states
            //Gl.glEnableClientState(Gl.GL_VERTEX_ARRAY);
            //Gl.glEnableClientState(Gl.GL_TEXTURE_COORD_ARRAY);

            FacesDrawn.SetAll(false);

            //Draw skybox
            //RenderSkyBox(CameraPosition);

            //Get current leaf index
            int LeafIndex = FindLeaf(CameraPosition);

            //Get current cluster
            int ClusterIndex = Leaves[LeafIndex].Cluster;

            int i = NumLeaves;

            //Loop through all leaves and check visibility
            while (i > 0)
            {
                i--;

                BSPLeaf CurrentLeaf = Leaves[i];

                if (IsClusterVisible(ClusterIndex, CurrentLeaf.Cluster))
                {
                    if (GameFrustrum == null || GameFrustrum.BoxInFrustrum(CurrentLeaf.Min.X, CurrentLeaf.Min.Y, CurrentLeaf.Min.Z,
                        CurrentLeaf.Max.X, CurrentLeaf.Max.Y, CurrentLeaf.Max.Z))
                    {
                        int FaceCount = CurrentLeaf.NumLeafFaces;

                        while (FaceCount > 0)
                        {
                            FaceCount--;
                            int FaceIndex = LeafFaces[CurrentLeaf.LeafFace + FaceCount];

                            if (Faces[FaceIndex] != null)
                            {
                                if (!FacesDrawn.Get(FaceIndex))
                                {
                                    FacesDrawn.Set(FaceIndex, true);
                                    RenderFace(Faces[FaceIndex].Type, FaceIndex);
                                }
                            }
                        }
                    }
                }
            }

            //Disable blending
            //Gl.glDisable(Gl.GL_BLEND);

            //Disable states
            //Gl.glDisableClientState(Gl.GL_TEXTURE_COORD_ARRAY);
            //Gl.glDisableClientState(Gl.GL_VERTEX_ARRAY);
        }
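
IsClusterVisible above is the usual Quake-3-style PVS lookup: every cluster owns a row of visibility bits, one bit per other cluster. A minimal sketch, assuming the decompressed PVS lives in a byte array VisData with BytesPerCluster bytes per row (both names are assumptions, not fields from the example):

 public bool IsClusterVisible(int currentCluster, int testCluster)
 {
     // With no PVS data, or when the camera sits outside the BSP (cluster -1),
     // treat everything as visible.
     if (VisData == null || currentCluster < 0)
         return true;

     // One bit per cluster: take the current cluster's row, then the byte and
     // bit that correspond to the tested cluster.
     byte visSet = VisData[currentCluster * BytesPerCluster + (testCluster >> 3)];
     return (visSet & (1 << (testCluster & 7))) != 0;
 }
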
 public FrustumTest GetCullState(Camera camera, Sphere sphere)
 {
     //return Camera.GetCullTest(camera.FrustumPlanes, sphere.Center, sphere.Radius);
     return(Frustum.TestFrustum(LocalFrustum, sphere));
 }
Exemple #39
0
 /// <summary>Get the intersection between a <see cref="Box3i"/> and a <see cref="Frustum"/>.</summary>
 public Containment Intersect(Frustum frustum)
 {
     throw new NotImplementedException();
 }
 public IEnumerable<int> GetQuadsInFrustum(Frustum frustum, bool treestart = false)
 {
     if (treestart || frustum.Check(BoundingBox))
     {
         if (SubContainer == null)
             return new List<int> { ContainedQuad };
         return SubContainer.SelectMany(container => container.GetQuadsInFrustum(frustum)).Where(quadId => quadId != -1);
     }
     return new List<int> { -1 };
 }
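
GetQuadsInFrustum returns -1 as a sentinel for culled leaves, and the sentinel filtering only happens while recursing into sub-containers, so a caller would typically start at the root with treestart = true and filter the result once more. A usage sketch, where rootContainer and cameraFrustum are assumed variables and System.Linq is in scope:

 var visibleQuads = rootContainer.GetQuadsInFrustum(cameraFrustum, treestart: true)
                                 .Where(quadId => quadId != -1)  // drop culled sentinels
                                 .ToList();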