internal static void Update()
{
    if (Collector.Collect())
    {
        SoundSources.Collect();
        Cursors.Collect();
        Texture2Ds.Collect();
        CubemapTextures.Collect();
        Fonts.Collect();
        Chip2Ds.Collect();
        Shader2Ds.Collect();
        Shader3Ds.Collect();
        Material2Ds.Collect();
        Material3Ds.Collect();
        MaterialPropertyBlocks.Collect();
        ImagePackages.Collect();
        MediaPlayers.Collect();
        Effects.Collect();
        Meshs.Collect();
        Deformers.Collect();
        Models.Collect();
        MassModels.Collect();
        Terrain3Ds.Collect();
        KeyframeAnimations.Collect();
        AnimationSources.Collect();
        AnimationClips.Collect();
        Scenes.Collect();
        Layer2Ds.Collect();
        Object2Ds.Collect();
        Layer3Ds.Collect();
        Object3Ds.Collect();
        PostEffects.Collect();
        Transitions.Collect();
        StaticFiles.Collect();
        StreamFiles.Collect();
        Shapes.Collect();
        Collider2Ds.Collect();
    }
}
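// The per-type collections swept above are not defined in this listing. The sketch
// below is a minimal illustration of the kind of weak-reference registry each
// Collect() call presumably walks; the names (NativeObjectRegistry, Register) are
// illustrative assumptions, not the engine's actual API.
using System;
using System.Collections.Generic;

internal sealed class NativeObjectRegistry<T> where T : class
{
    private readonly Dictionary<IntPtr, WeakReference<T>> objects = new Dictionary<IntPtr, WeakReference<T>>();
    private readonly List<IntPtr> dead = new List<IntPtr>();

    // Track a managed wrapper for a native handle.
    public void Register(IntPtr nativeHandle, T wrapper)
    {
        objects[nativeHandle] = new WeakReference<T>(wrapper);
    }

    // Drop entries whose managed wrapper has been garbage collected so the
    // corresponding native resource can be released.
    public void Collect()
    {
        dead.Clear();
        foreach (var pair in objects)
        {
            if (!pair.Value.TryGetTarget(out _))
                dead.Add(pair.Key);
        }
        foreach (var handle in dead)
        {
            objects.Remove(handle);
            // Releasing the native object behind 'handle' would happen here.
        }
    }
}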
protected override unsafe void CollectCore(RenderContext context)
{
    var camera = context.GetCurrentCamera();

    if (context.RenderView is null)
    {
        throw new NullReferenceException(nameof(context.RenderView) + " is null. Please make sure you have your camera correctly set.");
    }

    // Setup pixel formats for RenderStage
    using (context.SaveRenderOutputAndRestore())
    {
        // Mark this view as requiring shadows
        shadowMapRenderer?.RenderViewsWithShadows.Add(context.RenderView);

        context.RenderOutput = new RenderOutputDescription(
            PostEffects != null ? PixelFormat.R16G16B16A16_Float : context.RenderOutput.RenderTargetFormat0,
            DepthBufferFormat,
            MSAALevel);

        CollectStages(context);

        // Write params to view
        SceneCameraRenderer.UpdateCameraToRenderView(context, context.RenderView, camera);

        CollectView(context);

        LightShafts?.Collect(context);
        PostEffects?.Collect(context);

        // Set depth format for shadow map render stages
        // TODO: This format should be acquired from the ShadowMapRenderer instead of being fixed here
        foreach (var shadowMapRenderStage in ShadowMapRenderStages)
        {
            if (shadowMapRenderStage != null)
            {
                shadowMapRenderStage.Output = new RenderOutputDescription(PixelFormat.None, PixelFormat.D32_Float);
            }
        }
    }

    PostEffects?.Collect(context);
}
protected override unsafe void CollectCore(RenderContext context)
{
    var camera = context.GetCurrentCamera();

    if (context.RenderView == null)
    {
        throw new NullReferenceException(nameof(context.RenderView) + " is null. Please make sure you have your camera correctly set.");
    }

    // Setup pixel formats for RenderStage
    using (context.SaveRenderOutputAndRestore())
    {
        // Mark this view as requiring shadows
        shadowMapRenderer?.RenderViewsWithShadows.Add(context.RenderView);

        context.RenderOutput = new RenderOutputDescription(
            PostEffects != null ? PixelFormat.R16G16B16A16_Float : context.RenderOutput.RenderTargetFormat0,
            DepthBufferFormat,
            MSAALevel);

        CollectStages(context);

        if (VRSettings.Enabled && VRSettings.VRDevice != null)
        {
            Vector3 cameraPos, cameraScale;
            Matrix cameraRot;

            if (!vrSystem.PreviousUseCustomViewMatrix)
            {
                camera.Entity.Transform.WorldMatrix.Decompose(out cameraScale, out cameraRot, out cameraPos);
            }
            else
            {
                // Recover the camera's world rotation and position by inverting the custom view matrix:
                // transpose the rotation and rotate the negated translation back into world space.
                camera.ViewMatrix.Decompose(out cameraScale, out cameraRot, out cameraPos);
                cameraRot.Transpose();
                Vector3.Negate(ref cameraPos, out cameraPos);
                Vector3.TransformCoordinate(ref cameraPos, ref cameraRot, out cameraPos);
            }

            if (VRSettings.IgnoreCameraRotation)
            {
                cameraRot = Matrix.Identity;
            }

            // Compute both view and projection matrices
            Matrix* viewMatrices = stackalloc Matrix[2];
            Matrix* projectionMatrices = stackalloc Matrix[2];
            for (var i = 0; i < 2; ++i)
            {
                VRSettings.VRDevice.ReadEyeParameters(
                    i == 0 ? Eyes.Left : Eyes.Right,
                    camera.NearClipPlane, camera.FarClipPlane,
                    ref cameraPos, ref cameraRot,
                    out viewMatrices[i], out projectionMatrices[i]);
            }

            // If the VRDevice disagreed with the near and far planes, rediscover them from the projection matrix and follow:
            var near = projectionMatrices[0].M43 / projectionMatrices[0].M33;
            var far = near * (-projectionMatrices[0].M33 / (-projectionMatrices[0].M33 - 1));
            if (Math.Abs(near - camera.NearClipPlane) > 1e-8f)
            {
                camera.NearClipPlane = near;
            }
            if (Math.Abs(far - camera.FarClipPlane) > 1e-8f)
            {
                camera.FarClipPlane = far;
            }

            // Compute a view matrix and projection matrix that cover both eyes for shadow map and culling
            ComputeCommonViewMatrices(context, viewMatrices, projectionMatrices);
            var commonView = context.RenderView;

            // Notify the lighting system that this view exists only for shared lighting; it is not drawn directly.
            commonView.Flags |= RenderViewFlags.NotDrawn;

            // Collect now, and use the result for both eyes
            CollectView(context);
            context.VisibilityGroup.TryCollect(commonView);

            for (var i = 0; i < 2; i++)
            {
                using (context.PushRenderViewAndRestore(VRSettings.RenderViews[i]))
                using (context.SaveViewportAndRestore())
                {
                    context.RenderSystem.Views.Add(context.RenderView);
                    context.RenderView.SceneInstance = commonView.SceneInstance;
                    context.RenderView.LightingView = commonView;
                    context.ViewportState.Viewport0 = new Viewport(0, 0, VRSettings.VRDevice.ActualRenderFrameSize.Width / 2.0f, VRSettings.VRDevice.ActualRenderFrameSize.Height);

                    // Change camera parameters for this eye
                    camera.ViewMatrix = viewMatrices[i];
                    camera.ProjectionMatrix = projectionMatrices[i];
                    camera.UseCustomProjectionMatrix = true;
                    camera.UseCustomViewMatrix = true;
                    camera.Update();

                    // Write params to view
                    SceneCameraRenderer.UpdateCameraToRenderView(context, context.RenderView, camera);

                    // Copy culling results
                    context.VisibilityGroup.Copy(commonView, context.RenderView);

                    CollectView(context);

                    LightShafts?.Collect(context);
                    PostEffects?.Collect(context);
                }
            }

            if (VRSettings.VRDevice.SupportsOverlays)
            {
                foreach (var overlay in VRSettings.Overlays)
                {
                    if (overlay != null && overlay.Texture != null)
                    {
                        overlay.Overlay.Position = overlay.LocalPosition;
                        overlay.Overlay.Rotation = overlay.LocalRotation;
                        overlay.Overlay.SurfaceSize = overlay.SurfaceSize;
                        overlay.Overlay.FollowHeadRotation = overlay.FollowsHeadRotation;
                    }
                }
            }
        }
        else
        {
            // Write params to view
            SceneCameraRenderer.UpdateCameraToRenderView(context, context.RenderView, camera);

            CollectView(context);

            LightShafts?.Collect(context);
            PostEffects?.Collect(context);
        }

        // Set depth format for shadow map render stages
        // TODO: This format should be acquired from the ShadowMapRenderer instead of being fixed here
        foreach (var shadowMapRenderStage in ShadowMapRenderStages)
        {
            shadowMapRenderStage.Output = new RenderOutputDescription(PixelFormat.None, PixelFormat.D32_Float);
        }
    }

    PostEffects?.Collect(context);
}
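// Why near = M43 / M33 and far = near * (-M33 / (-M33 - 1)) recover the clip planes
// above: assuming a right-handed perspective projection with [0, 1] depth (the
// convention these formulas imply), M33 = zf / (zn - zf) and M43 = zn * zf / (zn - zf),
// so M43 / M33 = zn and -M33 / (-M33 - 1) = zf / zn. A minimal self-contained check
// of that algebra (plain C#, no engine types):
using System;

internal static class ClipPlaneRecoveryCheck
{
    internal static void Run()
    {
        const float zn = 0.1f, zf = 1000f;

        // The two projection-matrix entries the renderer reads back.
        var m33 = zf / (zn - zf);
        var m43 = zn * zf / (zn - zf);

        var near = m43 / m33;                     // recovers zn
        var far = near * (-m33 / (-m33 - 1f));    // recovers zf

        Console.WriteLine($"near = {near}, far = {far}"); // 0.1 and 1000, up to float rounding
    }
}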
protected override unsafe void CollectCore(RenderContext context)
{
    var camera = context.GetCurrentCamera();

    if (context.RenderView == null)
    {
        throw new NullReferenceException(nameof(context.RenderView) + " is null. Please make sure you have your camera correctly set.");
    }

    // Setup pixel formats for RenderStage
    using (context.SaveRenderOutputAndRestore())
    {
        // Mark this view as requiring shadows
        shadowMapRenderer?.RenderViewsWithShadows.Add(context.RenderView);

        context.RenderOutput = new RenderOutputDescription(context.RenderOutput.RenderTargetFormat0, DepthBufferFormat, MSAALevel);

        CollectStages(context);

        if (VRSettings.Enabled && VRSettings.VRDevice != null)
        {
            Matrix* viewMatrices = stackalloc Matrix[2];
            Matrix* projectionMatrices = stackalloc Matrix[2];

            // Only update the camera once if we have multiple forward renderers on the same camera
            ulong poseCount = VRSettings.VRDevice.PoseCount;
            if (poseCount == 0 || poseCount != camera.VRProjectionPose)
            {
                camera.VRProjectionPose = poseCount;

                Vector3 cameraPos, cameraScale;
                Matrix cameraRot;

                VRSettings.VRDevice.UpdatePositions(context.Time);

                if (camera.VRHeadSetsTransform)
                {
                    if (camera.Entity.Transform.Parent != null)
                    {
                        camera.Entity.Transform.Parent.WorldMatrix.Decompose(out cameraScale, out cameraRot, out cameraPos);
                    }
                    else
                    {
                        cameraPos = Vector3.Zero;
                        cameraScale = Vector3.One;
                        cameraRot = Matrix.Identity;
                    }

                    // Make sure the camera position gets the body scale applied
                    camera.Entity.Transform.Position = VRSettings.VRDevice.HeadPosition * VRSettings.VRDevice.BodyScaling;
                    camera.Entity.Transform.Rotation = VRSettings.VRDevice.HeadRotation;
                }
                else
                {
                    if (!vrSystem.PreviousUseCustomViewMatrix)
                    {
                        camera.Entity.Transform.WorldMatrix.Decompose(out cameraScale, out cameraRot, out cameraPos);
                    }
                    else
                    {
                        // Recover the camera's world rotation and position by inverting the custom view matrix:
                        // transpose the rotation and rotate the negated translation back into world space.
                        camera.ViewMatrix.Decompose(out cameraScale, out cameraRot, out cameraPos);
                        cameraRot.Transpose();
                        Vector3.Negate(ref cameraPos, out cameraPos);
                        Vector3.TransformCoordinate(ref cameraPos, ref cameraRot, out cameraPos);
                    }

                    if (VRSettings.IgnoreCameraRotation)
                    {
                        // Only remove the local rotation of the camera
                        cameraRot *= Matrix.RotationQuaternion(Quaternion.Invert(camera.Entity.Transform.Rotation));
                    }
                }

                // Compute both view and projection matrices
                for (var i = 0; i < 2; ++i)
                {
                    VRSettings.VRDevice.ReadEyeParameters(
                        i == 0 ? Eyes.Left : Eyes.Right,
                        camera.NearClipPlane, camera.FarClipPlane,
                        ref cameraPos, ref cameraRot,
                        VRSettings.IgnoreDeviceRotation, VRSettings.IgnoreDevicePosition,
                        out viewMatrices[i], out projectionMatrices[i]);
                }

                // Cache the per-eye view and projection matrices for reuse by other forward renderers this frame
                if (camera.cachedVRProjections == null)
                {
                    camera.cachedVRProjections = new Matrix[4];
                }
                camera.cachedVRProjections[0] = viewMatrices[0];
                camera.cachedVRProjections[1] = viewMatrices[1];
                camera.cachedVRProjections[2] = projectionMatrices[0];
                camera.cachedVRProjections[3] = projectionMatrices[1];

                // If the VRDevice disagreed with the near and far planes, rediscover them from the projection matrix and follow:
                var near = projectionMatrices[0].M43 / projectionMatrices[0].M33;
                var far = near * (-projectionMatrices[0].M33 / (-projectionMatrices[0].M33 - 1));
                if (Math.Abs(near - camera.NearClipPlane) > 1e-8f)
                {
                    camera.NearClipPlane = near;
                }
                if (Math.Abs(far - camera.FarClipPlane) > 1e-8f)
                {
                    camera.FarClipPlane = far;
                }
            }
            else
            {
                // Already computed for this camera and pose; reuse the cached matrices
                viewMatrices[0] = camera.cachedVRProjections[0];
                viewMatrices[1] = camera.cachedVRProjections[1];
                projectionMatrices[0] = camera.cachedVRProjections[2];
                projectionMatrices[1] = camera.cachedVRProjections[3];
            }

            // Compute a view matrix and projection matrix that cover both eyes for shadow map and culling
            ComputeCommonViewMatrices(context, viewMatrices, projectionMatrices);
            var commonView = context.RenderView;

            // Notify the lighting system that this view exists only for shared lighting; it is not drawn directly.
            commonView.Flags |= RenderViewFlags.NotDrawn;

            // Collect now, and use the result for both eyes
            CollectView(context);
            context.VisibilityGroup.TryCollect(commonView);

            for (var i = 0; i < 2; i++)
            {
                using (context.PushRenderViewAndRestore(VRSettings.RenderViews[i]))
                using (context.SaveViewportAndRestore())
                {
                    context.RenderSystem.Views.Add(context.RenderView);
                    context.RenderView.LightingView = commonView;
                    context.ViewportState.Viewport0 = new Viewport(0, 0, VRSettings.VRDevice.ActualRenderFrameSize.Width / 2.0f, VRSettings.VRDevice.ActualRenderFrameSize.Height);

                    // Change camera parameters for this eye
                    camera.ViewMatrix = viewMatrices[i];
                    camera.ProjectionMatrix = projectionMatrices[i];
                    camera.UseCustomProjectionMatrix = true;
                    camera.UseCustomViewMatrix = true;
                    camera.Update();

                    // Write params to view
                    SceneCameraRenderer.UpdateCameraToRenderView(context, context.RenderView, camera);

                    // Copy culling results
                    context.VisibilityGroup.Copy(commonView, context.RenderView);

                    CollectView(context);

                    LightShafts?.Collect(context);
                    PostEffects?.Collect(context);
                }
            }
        }
        else
        {
            // Write params to view
            SceneCameraRenderer.UpdateCameraToRenderView(context, context.RenderView, camera);

            CollectView(context);

            LightShafts?.Collect(context);
            PostEffects?.Collect(context);
        }

        // Set depth format for shadow map render stages
        // TODO: This format should be acquired from the ShadowMapRenderer instead of being fixed here
        foreach (var shadowMapRenderStage in ShadowMapRenderStages)
        {
            if (shadowMapRenderStage != null)
            {
                shadowMapRenderStage.Output = new RenderOutputDescription(PixelFormat.None, PixelFormat.D32_Float);
            }
        }
    }

    PostEffects?.Collect(context);
}
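// ComputeCommonViewMatrices is not shown in this listing. The sketch below only
// illustrates the idea (an assumption, not the engine's actual implementation):
// build one view/projection pair usable for shadow mapping and culling by sitting
// halfway between the two eye transforms and widening the projection so its frustum
// conservatively encloses both eye frusta. It assumes the Xenko/Stride math helpers
// Matrix.Lerp and Matrix.PerspectiveFovRH.
private static void ComputeCommonViewMatricesSketch(
    ref Matrix leftView, ref Matrix rightView,
    float near, float far, float eyeFovY, float aspect,
    out Matrix commonView, out Matrix commonProjection)
{
    // Both eye views are assumed to share the same orientation and differ only by a
    // small translation, so interpolating halfway yields a view centered between the eyes.
    Matrix.Lerp(ref leftView, ref rightView, 0.5f, out commonView);

    // Over-estimate the vertical field of view so the single frustum covers what either
    // eye can see; the margin here is an arbitrary illustrative value.
    const float fovMargin = 1.2f;
    commonProjection = Matrix.PerspectiveFovRH(eyeFovY * fovMargin, aspect, near, far);
}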