        static PostProcessLayer GetPPLayer(CinemachineBrain brain)
        {
            PostProcessLayer layer = null;

            if (mBrainToLayer.TryGetValue(brain, out layer))
            {
#if UNITY_EDITOR
                // Maybe they added it since we last checked
                if (layer != null || Application.isPlaying)
#endif
                return(layer);
            }
            layer = brain.GetComponent <PostProcessLayer>();
            mBrainToLayer[brain] = layer;
            if (layer != null)
            {
                brain.m_CameraCutEvent.AddListener(OnCameraCut);
            }
            else
            {
                brain.m_CameraCutEvent.RemoveListener(OnCameraCut);
            }
            return(layer);
        }
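        // The method above assumes a static lookup cache and a camera-cut handler that are not
        // shown in this example. A minimal sketch of those pieces, using the names from the
        // snippet (the real Cinemachine source may differ); requires System.Collections.Generic:
        static readonly Dictionary<CinemachineBrain, PostProcessLayer> mBrainToLayer
            = new Dictionary<CinemachineBrain, PostProcessLayer>();

        static void OnCameraCut(CinemachineBrain brain)
        {
            // Resetting the layer's history keeps temporal effects (TAA, motion blur)
            // from smearing across a hard camera cut.
            PostProcessLayer postFX = GetPPLayer(brain);
            if (postFX != null)
                postFX.ResetHistory();
        }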
    private void ApplyPerLevelSettings()
    {
        //Update the volume levels.
        mixer.SetFloat("MasterVolume", Mathf.Lerp(-80f, 0f, settings.masterVolume));
        mixer.SetFloat("MusicVolume", Mathf.Lerp(-80f, 0f, settings.musicVolume));
        mixer.SetFloat("GameplayVolume", Mathf.Lerp(-80f, 0f, settings.gameplayVolume));

        //Get the post processing layer from the main camera.
        PostProcessLayer postProcess = Camera.main.GetComponent <PostProcessLayer>();

        //Check if the anti-aliasing should be enabled.
        if (settings.antiAliasing == 0)
        {
            postProcess.antialiasingMode = PostProcessLayer.Antialiasing.None;
            return;
        }

        postProcess.antialiasingMode = PostProcessLayer.Antialiasing.SubpixelMorphologicalAntialiasing;

        //Set the quality of the anti-aliasing.
        if (settings.antiAliasing == 1)
        {
            postProcess.subpixelMorphologicalAntialiasing.quality = SubpixelMorphologicalAntialiasing.Quality.Low;
        }
        if (settings.antiAliasing == 2)
        {
            postProcess.subpixelMorphologicalAntialiasing.quality = SubpixelMorphologicalAntialiasing.Quality.Medium;
        }
        if (settings.antiAliasing == 3)
        {
            postProcess.subpixelMorphologicalAntialiasing.quality = SubpixelMorphologicalAntialiasing.Quality.High;
        }
    }
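    // Side note (not part of the example above): Lerp-ing a 0-1 slider straight onto the
    // -80..0 dB range makes the audible volume fall off unevenly, because mixer volume is
    // logarithmic. A log10-based conversion is a common alternative; the exposed parameter
    // names ("MasterVolume", etc.) are assumptions carried over from the snippet.
    private static float LinearToDecibel(float value01)
    {
        // Clamp to avoid Log10(0); -80 dB is effectively silence for an AudioMixer.
        return value01 > 0.0001f ? Mathf.Log10(value01) * 20f : -80f;
    }

    // Usage sketch:
    // mixer.SetFloat("MasterVolume", LinearToDecibel(settings.masterVolume));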
Example 3
        private void AddRecorderToCamera()
        {
            if (_recorder == null && CaptureCamera)
            {
                _recorder = CaptureCamera.GetComponent <Recorder>();
                if (_recorder == null)
                {
                    _recorder = CaptureCamera.gameObject.AddComponent <Recorder>();
                    _recorder.Init();

                    PostProcessLayer pp = Camera.main.GetComponent <PostProcessLayer>();
                    if (pp != null)
                    {
                        originalFinalBlitToCameraTarget = pp.finalBlitToCameraTarget;
                        pp.finalBlitToCameraTarget      = false;
                    }
                    removeRecorder = true;
                }
                else
                {
                    removeRecorder = false;
                }
            }
        }
Example 4
        public override void OnGraphStart(Playable playable)
        {
            base.OnGraphStart(playable);
            if (profile == null)
            {
                PostProcessLayer postLayer = null;

                if (Camera.main != null)
                {
                    postLayer = Camera.main.gameObject.GetComponentInChildren <PostProcessLayer>();
                }

                if (postLayer == null)
                {
                    var allCam = Camera.allCameras;
                    foreach (var cam in allCam)
                    {
                        var pl = cam.gameObject.GetComponentInChildren <PostProcessLayer>();
                        if (pl != null)
                        {
                            postLayer = pl;
                            break;
                        }
                    }
                }

                if (postLayer == null)
                {
                    return;
                }


                PostProcessVolume volume = PostProcessManager.instance.GetHighestPriorityVolume(postLayer);
                profile = volume.sharedProfile;
            }
        }
Example 5
    protected override void Render(ScriptableRenderContext context, Camera[] cameras)
    {
        BeginFrameRendering(context, cameras);

        foreach (Camera camera in cameras)
        {
            BeginCameraRendering(context, camera);

            //Culling
            ScriptableCullingParameters cullingParams;
            if (!camera.TryGetCullingParameters(out cullingParams))
            {
                continue;
            }
            CullingResults cull = context.Cull(ref cullingParams);

            //Camera setup: sets some built-in shader variables, e.g. the camera projection matrices
            context.SetupCameraProperties(camera);

            //Read the clear-flag settings from the camera component
            bool drawSkyBox = camera.clearFlags == CameraClearFlags.Skybox;
            bool clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
            bool clearColor = camera.clearFlags == CameraClearFlags.Color;

            //************************** Set TempRT ************************************

            CommandBuffer cmdTempId = new CommandBuffer();
            cmdTempId.name = "(" + camera.name + ")" + "Setup TempRT";

            //Color
            m_ColorFormatActive = camera.allowHDR ? m_ColorFormatHDR : m_ColorFormat;
            RenderTextureDescriptor colorRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            colorRTDesc.graphicsFormat    = m_ColorFormatActive;
            colorRTDesc.depthBufferBits   = depthBufferBits;
            colorRTDesc.sRGB              = (QualitySettings.activeColorSpace == ColorSpace.Linear);
            colorRTDesc.msaaSamples       = camera.allowMSAA ? QualitySettings.antiAliasing : 1;
            colorRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_ColorRTid, colorRTDesc, FilterMode.Bilinear);

            //Depth
            RenderTextureDescriptor depthRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            depthRTDesc.colorFormat     = RenderTextureFormat.Depth;
            depthRTDesc.depthBufferBits = depthBufferBits;
            cmdTempId.GetTemporaryRT(m_DepthRTid, depthRTDesc, FilterMode.Bilinear);
            context.ExecuteCommandBuffer(cmdTempId);
            cmdTempId.Release();

            //************************** Setup DrawSettings and FilterSettings ************************************

            var               sortingSettings   = new SortingSettings(camera);
            DrawingSettings   drawSettings      = new DrawingSettings(m_PassName, sortingSettings);
            FilteringSettings filterSettings    = new FilteringSettings(RenderQueueRange.all);
            DrawingSettings   drawSettingsDepth = new DrawingSettings(m_PassName, sortingSettings)
            {
                perObjectData             = PerObjectData.None,
                overrideMaterial          = depthOnlyMaterial,
                overrideMaterialPassIndex = 0
            };

            //************************** Rendering depth ************************************

            //Set RenderTarget & Camera clear flag
            CommandBuffer cmdDepth = new CommandBuffer();
            cmdDepth.name = "(" + camera.name + ")" + "Depth Clear Flag";
            cmdDepth.SetRenderTarget(m_DepthRT); //Set CameraTarget to the depth texture

            cmdDepth.ClearRenderTarget(true, true, Color.black);
            //MyDebug(camera,context,m_DepthRT,colorRTDesc,null);
            context.ExecuteCommandBuffer(cmdDepth);
            //MyDebug(camera,context,m_DepthRT,colorRTDesc,null);
            cmdDepth.Release();

            //Opaque objects
            sortingSettings.criteria          = SortingCriteria.CommonOpaque;
            drawSettingsDepth.sortingSettings = sortingSettings;
            filterSettings.renderQueueRange   = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettingsDepth, ref filterSettings);

            //Expose _CameraDepthTexture to shaders so that Depth of Field works
            CommandBuffer cmdDepthTexture = new CommandBuffer();
            cmdDepthTexture.name = "(" + camera.name + ")" + "Depth Texture";
            cmdDepthTexture.SetGlobalTexture(m_DepthRTid, m_DepthRT);
            context.ExecuteCommandBuffer(cmdDepthTexture);
            //MyDebug(camera,context,m_DepthRT,colorRTDesc,null);
            cmdDepthTexture.Release();

            //************************** Rendering colors ************************************

            //Set RenderTarget & Camera clear flag
            CommandBuffer cmd = new CommandBuffer();
            cmd.name = "(" + camera.name + ")" + "Clear Flag";
            cmd.SetRenderTarget(m_ColorRT); //Set CameraTarget to the color texture
            cmd.ClearRenderTarget(clearDepth, clearColor, camera.backgroundColor);
            context.ExecuteCommandBuffer(cmd);
            //MyDebug(camera,context,m_ColorRT,colorRTDesc,null);
            cmd.Release();

            //Skybox
            if (drawSkyBox)
            {
                context.DrawSkybox(camera);
            }

            AfterSkybox(camera, context);
            //************************** Rendering Opaque Objects ************************************

            sortingSettings.criteria        = SortingCriteria.CommonOpaque;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);
            AfterOpaqueObject(camera, context);
            //************************** SetUp Post-processing ************************************

            PostProcessLayer         m_CameraPostProcessLayer   = camera.GetComponent <PostProcessLayer>();
            bool                     hasPostProcessing          = m_CameraPostProcessLayer != null;
            bool                     usePostProcessing          = false;
            bool                     hasOpaqueOnlyEffects       = false;
            PostProcessRenderContext m_PostProcessRenderContext = null;
            if (hasPostProcessing)
            {
                m_PostProcessRenderContext = new PostProcessRenderContext();
                usePostProcessing          = m_CameraPostProcessLayer.enabled;
                hasOpaqueOnlyEffects       = m_CameraPostProcessLayer.HasOpaqueOnlyEffects(m_PostProcessRenderContext);
            }

            //************************** Opaque Post-processing ************************************
            //Ambient Occlusion and Screen-space Reflections are generally not supported in a custom SRP
            //So this part is only for custom opaque post-processing
            if (usePostProcessing)
            {
                CommandBuffer cmdpp = new CommandBuffer();
                cmdpp.name = "(" + camera.name + ")" + "Post-processing Opaque";

                m_PostProcessRenderContext.Reset();
                m_PostProcessRenderContext.camera       = camera;
                m_PostProcessRenderContext.source       = m_ColorRT;
                m_PostProcessRenderContext.sourceFormat = UnityEngine.Experimental.Rendering.GraphicsFormatUtility.GetRenderTextureFormat(m_ColorFormatActive);
                m_PostProcessRenderContext.destination  = m_ColorRT;
                m_PostProcessRenderContext.command      = cmdpp;
                m_PostProcessRenderContext.flip         = camera.targetTexture == null; // whether to flip the image
                m_CameraPostProcessLayer.RenderOpaqueOnly(m_PostProcessRenderContext);

                context.ExecuteCommandBuffer(cmdpp);
                cmdpp.Release();
            }

            //************************** Rendering Transparent Objects ************************************

            sortingSettings.criteria        = SortingCriteria.CommonTransparent;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.transparent;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);
            AfterTransparentObject(camera, context, m_DepthRTid, depthRTDesc);
            //************************** Transparent Post-processing ************************************
            //Bloom, Vignette, Grain, ColorGrading, LensDistortion, Chromatic Aberration, Auto Exposure
            if (usePostProcessing)
            {
                CommandBuffer cmdpp = new CommandBuffer();
                cmdpp.name = "(" + camera.name + ")" + "Post-processing Transparent";

                m_PostProcessRenderContext.Reset();
                m_PostProcessRenderContext.camera       = camera;
                m_PostProcessRenderContext.source       = m_ColorRT;
                m_PostProcessRenderContext.sourceFormat = UnityEngine.Experimental.Rendering.GraphicsFormatUtility.GetRenderTextureFormat(m_ColorFormatActive);
                m_PostProcessRenderContext.destination  = BuiltinRenderTextureType.CameraTarget;
                m_PostProcessRenderContext.command      = cmdpp;
                m_PostProcessRenderContext.flip         = camera.targetTexture == null;
                m_CameraPostProcessLayer.Render(m_PostProcessRenderContext);

                context.ExecuteCommandBuffer(cmdpp);
                cmdpp.Release();
            }

            //************************** Make sure the screen still gets the image when post-processing is off ************************************
            if (!usePostProcessing)
            {
                CommandBuffer cmdBlitToCam = new CommandBuffer();
                cmdBlitToCam.name = "(" + camera.name + ")" + "Blit back to Camera";
                cmdBlitToCam.Blit(m_ColorRTid, BuiltinRenderTextureType.CameraTarget);
                //MDebug.MyDebug();
                context.ExecuteCommandBuffer(cmdBlitToCam);
                cmdBlitToCam.Release();
            }

            //************************** Clean Up ************************************
            CommandBuffer cmdclean = new CommandBuffer();
            cmdclean.name = "(" + camera.name + ")" + "Clean Up";
            cmdclean.ReleaseTemporaryRT(m_ColorRTid);
            cmdclean.ReleaseTemporaryRT(m_DepthRTid);
            context.ExecuteCommandBuffer(cmdclean);
            cmdclean.Release();

            context.Submit();

            EndCameraRendering(context, camera);
        }

        EndFrameRendering(context, cameras);
    }
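    // Optional refactor sketch (not from the original example): the loop above allocates a new
    // CommandBuffer for every step of every camera. The Core RP Library's CommandBufferPool can
    // be used to reuse pooled buffers instead; e.g. the color clear could look like this:
    static void ClearColorTarget(ScriptableRenderContext context, Camera camera,
                                 RenderTargetIdentifier colorTarget, bool clearDepth, bool clearColor)
    {
        CommandBuffer cmd = CommandBufferPool.Get("(" + camera.name + ") Clear Flag");
        cmd.SetRenderTarget(colorTarget);
        cmd.ClearRenderTarget(clearDepth, clearColor, camera.backgroundColor);
        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }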
Example 6
        private void getEffects()
        {
            post_layer = Camera.main.GetComponent <PostProcessLayer>();
            if (post_layer == null)
            {
                Logger.Debug("Null post layer");
                return; // bail out: nothing to configure without a PostProcessLayer
            }
            if (post_layer.enabled == false)
            {
                post_layer.enabled = true;
                Logger.Debug("post_layer was disabled");
            }

            // get global volumes, order by decreasing priority
            var allVolumes = FindObjectsOfType <PostProcessVolume>().Where(v => v.isGlobal).OrderBy(v => - v.priority).ToList();

            Logger.Log($"Found {allVolumes.Count} volumes");
            if (allVolumes.Count == 0)
            {
                Logger.Log("No global volumes");
            }
            else if (allVolumes.Count == 1)
            {
                Logger.Log("Only 1 global volume");
                map_post_volume = post_volume = allVolumes.First();
            }
            else if (allVolumes.Count == 2)                 // expected behaviour
            {
                Logger.Log("Exactly 2 global volumes");
                post_volume     = allVolumes[0];
                map_post_volume = allVolumes[1];
            }
            else
            {
                Logger.Log("More than 2 global volumes");
                post_volume     = allVolumes[0];
                map_post_volume = allVolumes[1];
            }

            // get effects for the mod
            effectSuite = EffectSuite.FromVolume(post_volume);

            // get global map volume effects
            map_effectSuite = EffectSuite.FromVolume(map_post_volume);


            FXAA = post_layer.fastApproximateAntialiasing;
            TAA  = post_layer.temporalAntialiasing;
            SMAA = post_layer.subpixelMorphologicalAntialiasing;

            mapPreset.GetMapEffects();

            // enable volumetric lighting/fog/...
            var pipelineAsset = GraphicsSettings.renderPipelineAsset as HDRenderPipelineAsset;

            pipelineAsset.renderPipelineSettings.supportVolumetrics = true;

            presetsManager.ApplySettings();
            presetsManager.ApplyPresets();

            // After applying, can now save
            Main.canSave = true;

            //cameraController.GetHeadMaterials();
            //DayNightController.Instance.GetLights();

            Logger.Debug("Done getEffects");
        }
Example 7
 // Post-processing misc
 public static bool IsPostProcessingActive(PostProcessLayer layer)
 {
     return(layer != null &&
            layer.enabled);
 }
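 // A possible call site for the helper above (a sketch, not taken from the original repo):
 // skip all post-processing work for cameras whose layer is missing or disabled.
 public static bool ShouldRenderPostProcessing(Camera camera)
 {
     PostProcessLayer layer = camera.GetComponent<PostProcessLayer>();
     return IsPostProcessingActive(layer);
 }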
Example 8
 private void Awake()
 {
     postProcessingLayer = GetComponent <PostProcessLayer>();
 }
        void DrawAdditionalData()
        {
            bool hasChanged = false;
            bool selectedValueShadows;
            CameraOverrideOption   selectedDepthOption;
            CameraOverrideOption   selectedOpaqueOption;
            RendererOverrideOption selectedRendererOption;

            if (m_AdditionalCameraDataSO == null)
            {
                selectedValueShadows   = true;
                selectedDepthOption    = CameraOverrideOption.UsePipelineSettings;
                selectedOpaqueOption   = CameraOverrideOption.UsePipelineSettings;
                selectedRendererOption = RendererOverrideOption.UsePipelineSettings;
            }
            else
            {
                m_AdditionalCameraDataSO.Update();
                selectedValueShadows   = m_AdditionalCameraData.renderShadows;
                selectedDepthOption    = (CameraOverrideOption)m_AdditionalCameraDataRenderDepthProp.intValue;
                selectedOpaqueOption   = (CameraOverrideOption)m_AdditionalCameraDataRenderOpaqueProp.intValue;
                selectedRendererOption = (RendererOverrideOption)m_AdditionalCameraDataRendererProp.intValue;
            }

            // Renderer Type
            Rect controlRectRendererType = EditorGUILayout.GetControlRect(true);

            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.BeginProperty(controlRectRendererType, Styles.rendererType, m_AdditionalCameraDataRendererProp);
            }
            EditorGUI.BeginChangeCheck();
            selectedRendererOption = (RendererOverrideOption)EditorGUI.IntPopup(controlRectRendererType, Styles.rendererType, (int)selectedRendererOption, Styles.displayedRendererTypeOverride, Styles.rendererTypeOptions);
            if (EditorGUI.EndChangeCheck())
            {
                hasChanged = true;
            }
            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.EndProperty();
            }

            if (selectedRendererOption == RendererOverrideOption.Custom && m_AdditionalCameraDataSO != null)
            {
                EditorGUI.indentLevel++;
                EditorGUI.BeginChangeCheck();
                EditorGUILayout.PropertyField(m_AdditionalCameraDataRendererDataProp, Styles.rendererData);
                if (EditorGUI.EndChangeCheck())
                {
                    hasChanged = true;
                }
                EditorGUI.indentLevel--;
            }

            // Depth Texture
            Rect controlRectDepth = EditorGUILayout.GetControlRect(true);
            // Need to check whether a post-processing layer is added and active.
            // If it is, show the depth texture popup as forced on and gray it out.
            bool             defaultDrawOfDepthTextureUI = true;
            PostProcessLayer ppl = camera.GetComponent <PostProcessLayer>();
            var propValue        = (int)selectedDepthOption;

            if (ppl != null && ppl.isActiveAndEnabled)
            {
                if ((propValue == 2 && !m_LightweightRenderPipeline.supportsCameraDepthTexture) || propValue == 0)
                {
                    EditorGUI.BeginDisabledGroup(true);
                    EditorGUI.IntPopup(controlRectDepth, Styles.requireDepthTexture, 0, Styles.displayedDepthTextureOverride, Styles.additionalDataOptions);
                    EditorGUI.EndDisabledGroup();
                    defaultDrawOfDepthTextureUI = false;
                }
            }
            if (defaultDrawOfDepthTextureUI)
            {
                if (m_AdditionalCameraDataSO != null)
                {
                    EditorGUI.BeginProperty(controlRectDepth, Styles.requireDepthTexture, m_AdditionalCameraDataRenderDepthProp);
                }
                EditorGUI.BeginChangeCheck();

                selectedDepthOption = (CameraOverrideOption)EditorGUI.IntPopup(controlRectDepth, Styles.requireDepthTexture, (int)selectedDepthOption, Styles.displayedAdditionalDataOptions, Styles.additionalDataOptions);
                if (EditorGUI.EndChangeCheck())
                {
                    hasChanged = true;
                }
                if (m_AdditionalCameraDataSO != null)
                {
                    EditorGUI.EndProperty();
                }
            }

            // Opaque Texture
            Rect controlRectColor = EditorGUILayout.GetControlRect(true);

            // Begin the property scope if we have the serialized additional camera data object
            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.BeginProperty(controlRectColor, Styles.requireOpaqueTexture, m_AdditionalCameraDataRenderOpaqueProp);
            }
            EditorGUI.BeginChangeCheck();
            selectedOpaqueOption = (CameraOverrideOption)EditorGUI.IntPopup(controlRectColor, Styles.requireOpaqueTexture, (int)selectedOpaqueOption, Styles.displayedAdditionalDataOptions, Styles.additionalDataOptions);
            if (EditorGUI.EndChangeCheck())
            {
                hasChanged = true;
            }
            // End the property scope if we have the serialized additional camera data object
            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.EndProperty();
            }

            // Shadows
            Rect controlRectShadows = EditorGUILayout.GetControlRect(true);

            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.BeginProperty(controlRectShadows, Styles.renderingShadows, m_AdditionalCameraDataRenderShadowsProp);
            }
            EditorGUI.BeginChangeCheck();

            selectedValueShadows = EditorGUI.Toggle(controlRectShadows, Styles.renderingShadows, selectedValueShadows);
            if (EditorGUI.EndChangeCheck())
            {
                hasChanged = true;
            }
            if (m_AdditionalCameraDataSO != null)
            {
                EditorGUI.EndProperty();
            }

            if (hasChanged)
            {
                if (m_AdditionalCameraDataSO == null)
                {
                    m_AdditionalCameraData = camera.gameObject.AddComponent <LWRPAdditionalCameraData>();
                    init(m_AdditionalCameraData);
                }
                m_AdditionalCameraDataRenderShadowsProp.boolValue = selectedValueShadows;
                m_AdditionalCameraDataRenderDepthProp.intValue    = (int)selectedDepthOption;
                m_AdditionalCameraDataRenderOpaqueProp.intValue   = (int)selectedOpaqueOption;
                m_AdditionalCameraDataRendererProp.intValue       = (int)selectedRendererOption;
                m_AdditionalCameraDataSO.ApplyModifiedProperties();
            }
        }
Example 10
        public FrameRenderingConfiguration SetupFrameRenderingConfiguration(CameraContext cameraContext, ShadowManager shadowManager)
        {
            var configuration = (cameraContext.StereoEnabled) ? FrameRenderingConfiguration.Stereo : FrameRenderingConfiguration.None;

            if (cameraContext.StereoEnabled && XRSettings.eyeTextureDesc.dimension == TextureDimension.Tex2DArray)
            {
                m_IntermediateTextureArray = true;
            }
            else
            {
                m_IntermediateTextureArray = false;
            }

            var  camera              = cameraContext.Camera;
            bool hdrEnabled          = m_Asset.SupportsHDR && camera.allowHDR;
            bool intermediateTexture = camera.targetTexture != null || camera.cameraType == CameraType.SceneView ||
                                       m_Asset.RenderScale < 1.0f || hdrEnabled;

            m_ColorFormat            = hdrEnabled ? RenderTextureFormat.DefaultHDR : RenderTextureFormat.Default;
            m_RequireCopyColor       = false;
            m_DepthRenderBuffer      = false;
            m_CameraPostProcessLayer = camera.GetComponent <PostProcessLayer>();

            bool msaaEnabled = camera.allowMSAA && m_Asset.MSAASampleCount > 1 && (camera.targetTexture == null || camera.targetTexture.antiAliasing > 1);

            // TODO: PostProcessing and SoftParticles are currently not supported for VR
            bool postProcessEnabled = m_CameraPostProcessLayer != null && m_CameraPostProcessLayer.enabled && !cameraContext.StereoEnabled;

            m_RequireDepthTexture = m_Asset.RequireDepthTexture && !cameraContext.StereoEnabled;
            if (postProcessEnabled)
            {
                m_RequireDepthTexture = true;
                intermediateTexture   = true;

                configuration |= FrameRenderingConfiguration.PostProcess;
                if (m_CameraPostProcessLayer.HasOpaqueOnlyEffects(m_PostProcessRenderContext))
                {
                    configuration |= FrameRenderingConfiguration.BeforeTransparentPostProcess;
                    if (m_CameraPostProcessLayer.sortedBundles[PostProcessEvent.BeforeTransparent].Count == 1)
                    {
                        m_RequireCopyColor = true;
                    }
                }
            }

            if (cameraContext.SceneViewCamera)
            {
                m_RequireDepthTexture = true;
            }

            if (shadowManager.Shadows)
            {
                m_RequireDepthTexture = shadowManager.IsScreenSpace;

                if (!msaaEnabled)
                {
                    intermediateTexture = true;
                }
            }

            if (msaaEnabled)
            {
                configuration      |= FrameRenderingConfiguration.Msaa;
                intermediateTexture = intermediateTexture || !LightweightUtils.PlatformSupportsMSAABackBuffer();
            }

            if (m_RequireDepthTexture)
            {
                // If msaa is enabled we don't use a depth renderbuffer as we might not have support to Texture2DMS to resolve depth.
                // Instead we use a depth prepass and whenever depth is needed we use the 1 sample depth from prepass.
                // Screen space shadows require depth before opaque shading.
                if (!msaaEnabled && !shadowManager.Shadows)
                {
                    bool supportsDepthCopy = m_CopyTextureSupport != CopyTextureSupport.None && m_Asset.CopyDepthShader.isSupported;
                    m_DepthRenderBuffer = true;
                    intermediateTexture = true;

                    // If a camera depth texture is required we need a separate depth target, as it reads from and writes to depth at the same time
                    // Post-processing doesn't need the copy
                    if (!m_Asset.RequireDepthTexture && postProcessEnabled)
                    {
                        configuration |= (supportsDepthCopy) ? FrameRenderingConfiguration.DepthCopy : FrameRenderingConfiguration.DepthPrePass;
                    }
                }
                else
                {
                    configuration |= FrameRenderingConfiguration.DepthPrePass;
                }
            }

            Rect cameraRect = camera.rect;

            if (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f || Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f))
            {
                configuration |= FrameRenderingConfiguration.DefaultViewport;
            }
            else
            {
                intermediateTexture = true;
            }

            if (intermediateTexture)
            {
                configuration |= FrameRenderingConfiguration.IntermediateTexture;
            }

            return(configuration);
        }
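        // Sketch of how the returned flags might be consumed elsewhere (an assumption:
        // FrameRenderingConfiguration is a [Flags] enum, as its use with |= above suggests):
        static bool RequiresIntermediateTexture(FrameRenderingConfiguration config)
        {
            return (config & FrameRenderingConfiguration.IntermediateTexture) != 0;
        }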
Example 11
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys, MSAASamples msaaSamples)
        {
            // Store a shortcut to the HDAdditionalCameraData (done here and not in the constructor, as
            // we don't create an HDCamera every frame and the user can change the HDAdditionalCameraData later, e.g. when creating a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // Handle memory allocation.
            {
                bool isColorPyramidHistoryRequired = m_frameSettings.enableSSR; // TODO: TAA as well
                bool isVolumetricHistoryRequired   = m_frameSettings.enableVolumetrics && m_frameSettings.enableReprojectionForVolumetrics;

                int numColorPyramidBuffersRequired = isColorPyramidHistoryRequired ? 2 : 1; // TODO: 1 -> 0
                int numVolumetricBuffersRequired   = isVolumetricHistoryRequired   ? 2 : 0; // History + feedback

                if ((numColorPyramidBuffersAllocated != numColorPyramidBuffersRequired) ||
                    (numVolumetricBuffersAllocated != numVolumetricBuffersRequired))
                {
                    // Reinit the system.
                    colorPyramidHistoryIsValid = false;
                    vlSys.DeinitializePerCameraData(this);

                    // The history system only supports the "nuke all" option.
                    m_HistoryRTSystem.Dispose();
                    m_HistoryRTSystem = new BufferedRTHandleSystem();

                    if (numColorPyramidBuffersRequired != 0)
                    {
                        AllocHistoryFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain, HistoryBufferAllocatorFunction, numColorPyramidBuffersRequired);
                        colorPyramidHistoryIsValid = false;
                    }

                    vlSys.InitializePerCameraData(this, numVolumetricBuffersRequired);

                    // Mark as init.
                    numColorPyramidBuffersAllocated = numColorPyramidBuffersRequired;
                    numVolumetricBuffersAllocated   = numVolumetricBuffersRequired;
                }
            }

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            worldSpaceCameraPos = pos;

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              m_frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // Update viewport sizes.
            m_ViewportSizePrevFrame = new Vector2Int(m_ActualWidth, m_ActualHeight);
            m_ActualWidth           = camera.pixelWidth;
            m_ActualHeight          = camera.pixelHeight;

            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

            textureWidthScaling = new Vector4(1.0f, 1.0f, 0.0f, 0.0f);

            numEyes = camera.stereoEnabled ? (uint)2 : (uint)1; // TODO VR: Generalize this when support for >2 eyes comes out with XR SDK

            if (camera.stereoEnabled)
            {
                textureWidthScaling = new Vector4(2.0f, 0.5f, 0.0f, 0.0f);
                for (uint eyeIndex = 0; eyeIndex < 2; eyeIndex++)
                {
                    // For VR, TAA proj matrices don't need to be jittered
                    var currProjStereo    = camera.GetStereoProjectionMatrix((Camera.StereoscopicEye)eyeIndex);
                    var gpuCurrProjStereo = GL.GetGPUProjectionMatrix(currProjStereo, true);
                    var gpuCurrViewStereo = camera.GetStereoViewMatrix((Camera.StereoscopicEye)eyeIndex);

                    if (ShaderConfig.s_CameraRelativeRendering != 0)
                    {
                        // Zero out the translation component.
                        gpuCurrViewStereo.SetColumn(3, new Vector4(0, 0, 0, 1));
                    }
                    var gpuCurrVPStereo = gpuCurrProjStereo * gpuCurrViewStereo;

                    // A camera could be rendered multiple times per frame; only update the previous view-projection matrix & position if needed
                    if (m_LastFrameActive != Time.frameCount)
                    {
                        if (isFirstFrame)
                        {
                            prevViewProjMatrixStereo[eyeIndex] = gpuCurrVPStereo;
                        }
                        else
                        {
                            prevViewProjMatrixStereo[eyeIndex] = GetViewProjMatrixStereo(eyeIndex); // Grabbing this before ConfigureStereoMatrices updates view/proj
                        }

                        isFirstFrame = false;
                    }
                }
                isFirstFrame = true; // So that mono vars can still update when stereo active

                screenWidth  = XRGraphics.eyeTextureWidth;
                screenHeight = XRGraphics.eyeTextureHeight;

                var xrDesc = XRGraphics.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;
            }

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera could be rendered multiple times per frame; only update the previous view-projection matrix & position if needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;

            ConfigureStereoMatrices();

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            Frustum.Create(frustum, viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            // TODO: cache this, or make the history system spill the beans...
            Vector2Int prevColorPyramidBufferSize = Vector2Int.zero;

            if (numColorPyramidBuffersAllocated > 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                prevColorPyramidBufferSize.x = rt.width;
                prevColorPyramidBufferSize.y = rt.height;
            }

            // TODO: cache this, or make the history system spill the beans...
            Vector3Int prevVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                prevVolumetricBufferSize.x = rt.width;
                prevVolumetricBufferSize.y = rt.height;
                prevVolumetricBufferSize.z = rt.volumeDepth;
            }

            // Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because scripts change the current pipeline...
            m_msaaSamples = msaaSamples;
            RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            Vector3Int currColorPyramidBufferSize = Vector3Int.zero;

            if (numColorPyramidBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.ColorBufferMipChain).rt;

                currColorPyramidBufferSize.x = rt.width;
                currColorPyramidBufferSize.y = rt.height;

                if ((currColorPyramidBufferSize.x != prevColorPyramidBufferSize.x) ||
                    (currColorPyramidBufferSize.y != prevColorPyramidBufferSize.y))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    colorPyramidHistoryIsValid = false;
                }
            }

            Vector3Int currVolumetricBufferSize = Vector3Int.zero;

            if (numVolumetricBuffersAllocated != 0)
            {
                var rt = GetCurrentFrameRT((int)HDCameraFrameHistoryType.VolumetricLighting).rt;

                currVolumetricBufferSize.x = rt.width;
                currVolumetricBufferSize.y = rt.height;
                currVolumetricBufferSize.z = rt.volumeDepth;

                if ((currVolumetricBufferSize.x != prevVolumetricBufferSize.x) ||
                    (currVolumetricBufferSize.y != prevVolumetricBufferSize.y) ||
                    (currVolumetricBufferSize.z != prevVolumetricBufferSize.z))
                {
                    // A reallocation has happened, so the new texture likely contains garbage.
                    volumetricHistoryIsValid = false;
                }
            }

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;

            Vector2 rcpTextureSize = Vector2.one / new Vector2(maxWidth, maxHeight);

            m_ViewportScalePreviousFrame = m_ViewportSizePrevFrame * rcpTextureSize;
            m_ViewportScaleCurrentFrame  = new Vector2Int(m_ActualWidth, m_ActualHeight) * rcpTextureSize;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }

            UpdateVolumeParameters();
        }
Example 12
 private void Awake()
 {
     ppLayer = GetComponent <PostProcessLayer>();
     GraphicsSettings.PostProcessingChanged.AddListener(UpdateCamera);
 }
Example 13
        /// <summary>
        /// Apply URP post fx
        /// </summary>
        /// <param name="profile"></param>
        /// <param name="lightingProfile"></param>
        public static GameObject ApplyURPPostProcessing(GaiaLightingProfileValues profile, SceneProfile lightingProfile)
        {
            try
            {
                GameObject volumeObject = null;
#if UPPipeline
                if (lightingProfile.m_enablePostProcessing)
                {
                    volumeObject = GameObject.Find("Global Post Processing");
                    if (volumeObject == null)
                    {
                        volumeObject = new GameObject("Global Post Processing");
                    }

                    GameObject parentObject = GaiaLighting.GetOrCreateParentObject(GaiaConstants.gaiaLightingObject, true);
                    if (parentObject != null)
                    {
                        volumeObject.transform.SetParent(parentObject.transform);
                    }
                    volumeObject.layer = 0;

                    Volume volume = volumeObject.GetComponent <Volume>();
                    if (volume == null)
                    {
                        volume = volumeObject.AddComponent <Volume>();
                    }

                    if (GaiaGlobal.Instance != null)
                    {
                        SceneProfile sceneProfile = GaiaGlobal.Instance.SceneProfile;
                        if (sceneProfile != null)
                        {
                            if (sceneProfile.m_lightingEditSettings || profile.m_userCustomProfile)
                            {
                                sceneProfile.m_universalPostFXProfile = profile.PostProcessProfileURP;
                            }
                            else
                            {
                                CreatePostFXProfileInstance(sceneProfile, profile);
                            }

                            volume.sharedProfile = sceneProfile.m_universalPostFXProfile;

                            Camera camera = GaiaUtils.GetCamera();
                            if (camera != null)
                            {
                                UniversalAdditionalCameraData cameraData = camera.GetComponent <UniversalAdditionalCameraData>();
                                if (cameraData == null)
                                {
                                    cameraData = camera.gameObject.AddComponent <UniversalAdditionalCameraData>();
                                }

                                cameraData.renderPostProcessing = true;
                                GaiaLighting.ConfigureAntiAliasing(sceneProfile, GaiaConstants.EnvironmentRenderer.Universal);
                            }
                        }
                    }
                }
                else
                {
                    volumeObject = GameObject.Find("Global Post Processing");
                    if (volumeObject != null)
                    {
                        GameObject.DestroyImmediate(volumeObject);
                    }

                    Camera camera = GaiaUtils.GetCamera();
                    if (camera != null)
                    {
                        UniversalAdditionalCameraData cameraData = camera.GetComponent <UniversalAdditionalCameraData>();
                        if (cameraData == null)
                        {
                            cameraData = camera.gameObject.AddComponent <UniversalAdditionalCameraData>();
                        }

                        cameraData.renderPostProcessing = false;
                    }
                }
    #endif
    #if UNITY_POST_PROCESSING_STACK_V2
                PostProcessLayer postProcessLayer = GameObject.FindObjectOfType <PostProcessLayer>();
                if (postProcessLayer != null)
                {
                    GameObject.DestroyImmediate(postProcessLayer);
                }

                PostProcessVolume postProcessVolume = GameObject.FindObjectOfType <PostProcessVolume>();
                if (postProcessVolume != null)
                {
                    GameObject.DestroyImmediate(postProcessVolume);
                }
    #endif

                return(volumeObject);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
 public PostProcessingSettings(Camera camera)
 {
     initialCamera     = camera;
     _postProcessLayer = camera.GetComponent <PostProcessLayer>();
     SetupVolume();
 }
Example 15
        public static void GetFrameConfigration(RGCamera rgCam, PostProcessLayer postProcessLayer, VolumetricSettings volumetricSettings, ref FrameConfigration frameConfig)
        {
            AdditionalCameraData acd = rgCam.camera.GetComponent <AdditionalCameraData>();

            frameConfig.enableHDR         = true;
            frameConfig.enablePostprocess = CoreUtils.IsPostProcessingActive(postProcessLayer);
            if (acd)
            {
                switch (acd.m_RenderingType)
                {
                case AdditionalCameraData.RenderingType.ClusterLight:
                    frameConfig.enableSky               = true;
                    frameConfig.enableShadows           = true;
                    frameConfig.enableAsyncCompute      = true;
                    frameConfig.enableClusterLighting   = true;
                    frameConfig.enableVolumetricDisplay = acd.m_DisplayVolumetricFog;
                    break;

                case AdditionalCameraData.RenderingType.Unlit:
                    frameConfig.enableShadows           = false;
                    frameConfig.enableAsyncCompute      = false;
                    frameConfig.enableSky               = true;
                    frameConfig.enableDepthPrePass      = false;
                    frameConfig.enableClusterLighting   = false;
                    frameConfig.enableVolumetricDisplay = false;
                    break;

                case AdditionalCameraData.RenderingType.SimpleLight:
                    frameConfig.enableAsyncCompute      = false;
                    frameConfig.enableSky               = true;
                    frameConfig.enableDepthPrePass      = false;
                    frameConfig.enableVolumetricFog     = false;
                    frameConfig.enableClusterLighting   = false;
                    frameConfig.enableVolumetricDisplay = false;
                    break;

                case AdditionalCameraData.RenderingType.StaticShadow:
                    frameConfig.enableSky               = false;
                    frameConfig.enableShadows           = false;
                    frameConfig.enableAsyncCompute      = false;
                    frameConfig.enableDepthPrePass      = false;
                    frameConfig.enableVolumetricFog     = false;
                    frameConfig.enableClusterLighting   = false;
                    frameConfig.enableVolumetricDisplay = false;
                    break;

                default:
                    break;
                }
            }
#if UNITY_EDITOR
            else if (rgCam.camera.cameraType == CameraType.SceneView)
            {
                frameConfig.enableSky               = true;
                frameConfig.enableShadows           = true;
                frameConfig.enableAsyncCompute      = true;
                frameConfig.enableClusterLighting   = true;
                frameConfig.enableVolumetricDisplay = true;
            }
#endif

            bool msaaChanged    = false;
            var  commonSettings = VolumeManager.instance.stack.GetComponent <CommonSettings>();
            if (commonSettings)
            {
                frameConfig.enableAsyncCompute      = commonSettings.enableAsyncCompute;
                frameConfig.hasRefraction           = commonSettings.hasRefraction;
                frameConfig.enableScreenSpaceShadow = commonSettings.enableScreenSpaceShadow;
                frameConfig.enableDepthPrePass      = commonSettings.enableDepthPrepass;
                msaaChanged = frameConfig.rtConfig.msaaSamples != commonSettings.msaaSamples;
                frameConfig.enableLightCullingMask = commonSettings.enableLightCullingMask;
                frameConfig.rtConfig.msaaSamples   = commonSettings.msaaSamples;
            }

            frameConfig.enableGradientResolution = rgCam.camera.stereoEnabled && !frameConfig.hasRefraction;

            frameConfig.enableVolumetricLighting = false;
            frameConfig.enableVolumetricFog      = false;
            if (volumetricSettings /* && volumetricSettings.BaseDensity != 0*/)
            {
                frameConfig.enableVolumetricLighting = true;
                frameConfig.enableVolumetricFog      = true;
            }

#if UNITY_EDITOR
            if (rgCam.camera.cameraType == CameraType.Preview || rgCam.camera.name.Contains("Preview"))
            {
                frameConfig.enableDepthPrePass       = false;
                frameConfig.enablePostprocess        = false;
                frameConfig.enableClusterLighting    = true;
                frameConfig.enableVolumetricFog      = false;
                frameConfig.enableVolumetricLighting = false;
                frameConfig.enableHalfResParticle    = false;
                frameConfig.enableShadows            = false;
                frameConfig.enableSky               = false;
                frameConfig.enableAsyncCompute      = false;
                frameConfig.enableScreenSpaceShadow = false;
                frameConfig.enableStaticShadowmap   = false;
                frameConfig.rtConfig.msaaSamples    = MSAASamples.None;
            }
#endif

            if (rgCam.CameraWidth != frameConfig.rtConfig.frameWidth || rgCam.CameraHeight != frameConfig.rtConfig.frameHeight || rgCam.RenderTextureDimension != frameConfig.rtConfig.textureDimension || msaaChanged)
            {
                frameConfig.rtConfigChanged           = true;
                frameConfig.rtConfig.frameWidth       = rgCam.CameraWidth;
                frameConfig.rtConfig.frameHeight      = rgCam.CameraHeight;
                frameConfig.rtConfig.textureDimension = rgCam.RenderTextureDimension;
#if UNITY_EDITOR
                if (frameConfig.enableVolumetricFog || frameConfig.enableVolumetricLighting)
                {
                    if (frameConfig.rtConfig.volumetricWidth != rgCam.CameraWidth || frameConfig.rtConfig.volumetricHeight != rgCam.CameraHeight)
                    {
                        frameConfig.rtConfig.volumetricWidth          = rgCam.CameraWidth;
                        frameConfig.rtConfig.volumetricHeight         = rgCam.CameraHeight;
                        frameConfig.rtConfig.volumetricNeedReallocate = true;
                    }
                    else
                    {
                        frameConfig.rtConfig.volumetricNeedReallocate = false;
                    }
                }
#endif
            }
            else
            {
                frameConfig.rtConfigChanged = false;
            }

            if (frameConfig.rtConfigChanged)
            {
                frameConfig.clusterConfigChanged = true;
            }
            else
            {
                frameConfig.clusterConfigChanged = false;
            }
        }
 public void SetPostProcess(Scene scene, LoadSceneMode mode)
 {
     ppProfile = FindObjectOfType <PostProcessVolume>();
     ppLayer   = FindObjectOfType <PostProcessLayer>();
 }
 private void Awake()
 {
     ppProfile = FindObjectOfType <PostProcessVolume>();
     ppLayer   = FindObjectOfType <PostProcessLayer>();
 }
Example 18
        public static void SetupPostProcessing()
        {
#if UNITY_POST_PROCESSING_STACK_V2 && UNITY_2018_3_OR_NEWER
            const string defaultName = "WeatherMakerPostProcessingProfile_Default";
            string[]     assets      = AssetDatabase.FindAssets("WeatherMakerPrefab");
            foreach (string asset in assets)
            {
                string path = AssetDatabase.GUIDToAssetPath(asset);
                if (path.EndsWith("WeatherMakerPrefab.prefab", StringComparison.OrdinalIgnoreCase))
                {
                    string dir         = Path.GetDirectoryName(path);
                    string profilesDir = Path.Combine(dir, "Profiles");
                    profilesDir = Path.Combine(profilesDir, "PostProcessing");
                    string assetPath = Path.Combine(profilesDir, defaultName + ".asset");
                    Directory.CreateDirectory(profilesDir);
                    PostProcessProfile profile = AssetDatabase.LoadAssetAtPath <PostProcessProfile>(assetPath);

                    if (profile == null)
                    {
                        EditorUtility.DisplayDialog("Error", "Unable to find Weather Maker default post processing profile at '" + assetPath + "'", "OK");
                        return;

                        /*
                         * profile = ScriptableObject.CreateInstance<PostProcessProfile>();
                         * profile.name = defaultName;
                         * AmbientOcclusion ambient = profile.AddSettings<AmbientOcclusion>();
                         * ambient.intensity = new FloatParameter { value = 0.5f, overrideState = true };
                         * Bloom bloom = profile.AddSettings<Bloom>();
                         * bloom.intensity = new FloatParameter { value = 3.0f, overrideState = true };
                         * ColorGrading color = profile.AddSettings<ColorGrading>();
                         * color.tonemapper = new TonemapperParameter { value = Tonemapper.ACES, overrideState = true };
                         * color.postExposure = new FloatParameter { value = 0.7f, overrideState = true };
                         * DepthOfField df = profile.AddSettings<DepthOfField>();
                         * df.focusDistance = new FloatParameter { value = 3.5f, overrideState = true };
                         * Vignette vig = profile.AddSettings<Vignette>();
                         * vig.intensity = new FloatParameter { value = 0.3f, overrideState = true };
                         *
                         * if (createProfile)
                         * {
                         *  AssetDatabase.CreateAsset(profile, assetPath);
                         *  AssetDatabase.SaveAssets();
                         *  AssetDatabase.Refresh();
                         *  EditorUtility.FocusProjectWindow();
                         * }
                         */
                    }

                    Camera cam = Camera.main;
                    if (cam == null)
                    {
                        EditorUtility.DisplayDialog("Error", "Could not find main camera in scene, make sure your camera is tagged as main camera", "OK");
                        return;
                    }

                    PostProcessLayer layer = cam.GetComponent <PostProcessLayer>();
                    if (layer == null)
                    {
                        layer = Undo.AddComponent <PostProcessLayer>(cam.gameObject);
                    }
                    layer.antialiasingMode = PostProcessLayer.Antialiasing.SubpixelMorphologicalAntialiasing;
                    layer.fog = new Fog {
                        enabled = false, excludeSkybox = true
                    };
                    layer.volumeLayer = -1;

                    PostProcessVolume volume = cam.GetComponent <PostProcessVolume>();
                    if (volume == null)
                    {
                        volume = Undo.AddComponent <PostProcessVolume>(cam.gameObject);
                    }
                    volume.sharedProfile = profile;
                    volume.isGlobal      = true;
                    Undo.RegisterCompleteObjectUndo(layer, "Weather Maker Add Post Processing Volume");

                    Selection.activeObject = cam.gameObject;

                    EditorUtility.DisplayDialog("Success", "Post processing profile created at '" + defaultName + "' and applied to main camera. " +
                                                "You should configure your layer on the camera and post processing for best performance.", "OK");

                    return;
                }
            }

            EditorUtility.DisplayDialog("Error", "There was an error setting up the post processing stack, please do it manually", "OK");
#else
            EditorUtility.DisplayDialog("Error", "Please use Unity 2018.3 or newer and use the package manager to add the post processing stack v2. " +
                                        "Also be sure 'UNITY_POST_PROCESSING_STACK_V2' is in player settings -> scripting defines. Do all this and try again.", "OK");
#endif
        }
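The method above relies on editor-only APIs (AssetDatabase, Undo, EditorUtility). A stripped-down runtime equivalent of the layer/volume setup might look like the sketch below; the class name and profile parameter are assumptions, not part of Weather Maker.

using UnityEngine;
using UnityEngine.Rendering.PostProcessing;

public static class RuntimePostProcessSetup
{
    // Attach a global post-processing volume and layer to a camera at runtime.
    public static void Apply(Camera cam, PostProcessProfile profile, LayerMask volumeLayer)
    {
        PostProcessLayer layer = cam.GetComponent<PostProcessLayer>();
        if (layer == null)
        {
            // Note: a layer added from script may also need Init(PostProcessResources),
            // depending on how the project references the post-processing resources.
            layer = cam.gameObject.AddComponent<PostProcessLayer>();
        }
        layer.antialiasingMode = PostProcessLayer.Antialiasing.SubpixelMorphologicalAntialiasing;
        layer.volumeLayer      = volumeLayer;

        PostProcessVolume volume = cam.GetComponent<PostProcessVolume>();
        if (volume == null)
        {
            volume = cam.gameObject.AddComponent<PostProcessVolume>();
        }
        volume.sharedProfile = profile;
        volume.isGlobal      = true;
    }
}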
 void Reset()
 {
     postProcessLayer = GetComponent <PostProcessLayer>();
 }
Esempio n. 20
        public void Update(PostProcessLayer postProcessLayer, FrameSettings frameSettings)
        {
            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = Application.isPlaying && camera.cameraType == CameraType.Game &&
                              CoreUtils.IsTemporalAntialiasingActive(postProcessLayer);

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            var pos    = camera.transform.position;
            var relPos = pos; // World-origin-relative

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
                relPos = Vector3.zero; // Camera-relative
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera can be rendered multiple times per frame, so only update the previous view-projection matrix and position when needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            const uint taaFrameCount = 8;

            taaFrameIndex    = taaEnabled ? (uint)Time.renderedFrameCount % taaFrameCount : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;
            viewParam             = new Vector4(viewMatrix.determinant, 0.0f, 0.0f, 0.0f);

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            // Warning: near and far planes appear to be broken (or rather far plane seems broken)
            GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustumPlanes);

            for (int i = 0; i < 4; i++)
            {
                // Left, right, top, bottom.
                frustumPlaneEquations[i] = new Vector4(frustumPlanes[i].normal.x, frustumPlanes[i].normal.y, frustumPlanes[i].normal.z, frustumPlanes[i].distance);
            }

            // Near, far.
            Vector4 forward = (camera.cameraType == CameraType.Reflection) ? camera.worldToCameraMatrix.GetRow(2) : new Vector4(camera.transform.forward.x, camera.transform.forward.y, camera.transform.forward.z, 0.0f);

            // We need to switch the forward direction based on handedness (reminder: regular cameras have a negative view-matrix determinant in Unity, while reflection probes follow the DX convention and have a positive determinant)
            forward = viewParam.x < 0.0f ? forward : -forward;
            frustumPlaneEquations[4] = new Vector4(forward.x, forward.y, forward.z, -Vector3.Dot(forward, relPos) - camera.nearClipPlane);
            frustumPlaneEquations[5] = new Vector4(-forward.x, -forward.y, -forward.z, Vector3.Dot(forward, relPos) + camera.farClipPlane);

            m_LastFrameActive = Time.frameCount;

            RenderTextureDescriptor tempDesc;

            if (frameSettings.enableStereo)
            {
                screenSize = new Vector4(XRSettings.eyeTextureWidth, XRSettings.eyeTextureHeight, 1.0f / XRSettings.eyeTextureWidth, 1.0f / XRSettings.eyeTextureHeight);
                tempDesc   = XRSettings.eyeTextureDesc;
            }
            else
            {
                screenSize = new Vector4(camera.pixelWidth, camera.pixelHeight, 1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);
                tempDesc   = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            }

            if (frameSettings.enableMSAA)
            {
                // this is already pre-validated to be a valid sample count by InitializeFrameSettings
                var sampleCount = QualitySettings.antiAliasing;
                tempDesc.msaaSamples = sampleCount;
            }
            else
            {
                tempDesc.msaaSamples = 1;
            }
            tempDesc.depthBufferBits   = 0;
            tempDesc.autoGenerateMips  = false;
            tempDesc.useMipMap         = false;
            tempDesc.enableRandomWrite = false;
            tempDesc.memoryless        = RenderTextureMemoryless.None;

            renderTextureDesc = tempDesc;
        }
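The plane equations filled in above use Unity's inward-facing (normal.xyz, distance) convention, so a point is inside the frustum when dot(normal, point) + distance is non-negative for all six planes. A small illustrative check (hypothetical helper; points are expected in the same space as the planes):

using UnityEngine;

public static class FrustumTest
{
    // Returns true when the point lies on the positive side of every plane,
    // i.e. inside the frustum described by the (normal.xyz, distance) equations.
    public static bool IsInside(Vector4[] planeEquations, Vector3 point)
    {
        for (int i = 0; i < planeEquations.Length; i++)
        {
            Vector4 p = planeEquations[i];
            if (p.x * point.x + p.y * point.y + p.z * point.z + p.w < 0.0f)
            {
                return false;
            }
        }
        return true;
    }
}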
Esempio n. 21
        // Pass all the systems that may want to update per-camera data here.
        // That way you will never update an HDCamera and forget to update the dependent system.
        public void Update(FrameSettings currentFrameSettings, PostProcessLayer postProcessLayer, VolumetricLightingSystem vlSys)
        {
            // store a shortcut on HDAdditionalCameraData (done here and not in the constructor as
            // we don't create an HDCamera every frame and the user can change the HDAdditionalCameraData later, e.g. when they create a new scene).
            m_AdditionalCameraData = camera.GetComponent <HDAdditionalCameraData>();

            m_frameSettings = currentFrameSettings;

            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              HDUtils.IsTemporalAntialiasingActive(postProcessLayer) &&
                              m_frameSettings.enablePostprocess;

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            // In stereo, this corresponds to the center eye position
            var pos = camera.transform.position;

            worldSpaceCameraPos = pos;

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera can be rendered multiple times per frame, so only update the previous view-projection matrix and position when needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;
            }

            taaFrameIndex    = taaEnabled ? (uint)postProcessLayer.temporalAntialiasing.sampleIndex : 0;
            taaFrameRotation = new Vector2(Mathf.Sin(taaFrameIndex * (0.5f * Mathf.PI)),
                                           Mathf.Cos(taaFrameIndex * (0.5f * Mathf.PI)));

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;
            detViewMatrix         = viewMatrix.determinant;

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                Matrix4x4 cameraDisplacement = Matrix4x4.Translate(cameraPos - prevCameraPos); // Non-camera-relative positions
                prevViewProjMatrix *= cameraDisplacement;                                      // Now prevViewProjMatrix correctly transforms this frame's camera-relative positionWS
            }

            float n = camera.nearClipPlane;
            float f = camera.farClipPlane;

            // Analyze the projection matrix.
            // p[2][3] = (reverseZ ? 1 : -1) * (depth_0_1 ? 1 : 2) * (f * n) / (f - n)
            float scale     = projMatrix[2, 3] / (f * n) * (f - n);
            bool  depth_0_1 = Mathf.Abs(scale) < 1.5f;
            bool  reverseZ  = scale > 0;
            bool  flipProj  = projMatrix.inverse.MultiplyPoint(new Vector3(0, 1, 0)).y < 0;

            // http://www.humus.name/temp/Linearize%20depth.txt
            if (reverseZ)
            {
                zBufferParams = new Vector4(-1 + f / n, 1, -1 / f + 1 / n, 1 / f);
            }
            else
            {
                zBufferParams = new Vector4(1 - f / n, f / n, 1 / f - 1 / n, 1 / n);
            }

            projectionParams = new Vector4(flipProj ? -1 : 1, n, f, 1.0f / f);

            float orthoHeight = camera.orthographic ? 2 * camera.orthographicSize : 0;
            float orthoWidth  = orthoHeight * camera.aspect;

            unity_OrthoParams = new Vector4(orthoWidth, orthoHeight, 0, camera.orthographic ? 1 : 0);

            frustum = Frustum.Create(viewProjMatrix, depth_0_1, reverseZ);

            // Left, right, top, bottom, near, far.
            for (int i = 0; i < 6; i++)
            {
                frustumPlaneEquations[i] = new Vector4(frustum.planes[i].normal.x, frustum.planes[i].normal.y, frustum.planes[i].normal.z, frustum.planes[i].distance);
            }

            m_LastFrameActive = Time.frameCount;

            m_ActualWidth  = camera.pixelWidth;
            m_ActualHeight = camera.pixelHeight;
            var screenWidth  = m_ActualWidth;
            var screenHeight = m_ActualHeight;

//forest-begin: Added XboxOne to define around XR code
#if !UNITY_SWITCH && !UNITY_XBOXONE
//forest-end:
            if (m_frameSettings.enableStereo)
            {
                screenWidth  = XRSettings.eyeTextureWidth;
                screenHeight = XRSettings.eyeTextureHeight;

                var xrDesc = XRSettings.eyeTextureDesc;
                m_ActualWidth  = xrDesc.width;
                m_ActualHeight = xrDesc.height;

                ConfigureStereoMatrices();
            }
#endif

            // Unfortunately, sometimes (like in the HDCameraEditor) HDUtils.hdrpSettings can be null because of scripts that change the current pipeline...
            m_msaaSamples = HDUtils.hdrpSettings != null ? HDUtils.hdrpSettings.msaaSampleCount : MSAASamples.None;
            RTHandles.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
            m_HistoryRTSystem.SetReferenceSize(m_ActualWidth, m_ActualHeight, m_frameSettings.enableMSAA, m_msaaSamples);
            m_HistoryRTSystem.Swap();

            int maxWidth  = RTHandles.maxWidth;
            int maxHeight = RTHandles.maxHeight;
            m_ViewportScalePreviousFrame  = m_ViewportScaleCurrentFrame; // Double-buffer
            m_ViewportScaleCurrentFrame.x = (float)m_ActualWidth / maxWidth;
            m_ViewportScaleCurrentFrame.y = (float)m_ActualHeight / maxHeight;

            screenSize   = new Vector4(screenWidth, screenHeight, 1.0f / screenWidth, 1.0f / screenHeight);
            screenParams = new Vector4(screenSize.x, screenSize.y, 1 + screenSize.z, 1 + screenSize.w);

            if (vlSys != null)
            {
                vlSys.UpdatePerCameraData(this);
            }
        }
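The zBufferParams packed above follow the layout consumed by Unity's Linear01Depth/LinearEyeDepth shader helpers. The C# sketch below only illustrates how those four values turn a raw depth-buffer sample into linear depth; in practice this happens in HLSL.

using UnityEngine;

public static class DepthLinearization
{
    // Maps a raw depth-buffer value to linear depth in [0, 1], where 1 is the far plane.
    public static float Linear01Depth(float rawDepth, Vector4 zBufferParams)
    {
        return 1.0f / (zBufferParams.x * rawDepth + zBufferParams.y);
    }

    // Maps a raw depth-buffer value to view-space (eye) depth in world units.
    public static float LinearEyeDepth(float rawDepth, Vector4 zBufferParams)
    {
        return 1.0f / (zBufferParams.z * rawDepth + zBufferParams.w);
    }
}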
        private static List <PostProcessVolume> GetDynamicBrainVolumes(CinemachineBrain brain, PostProcessLayer ppLayer, int minVolumes)
        {
            GameObject gameObject = null;
            Transform  transform  = brain.transform;
            int        childCount = transform.childCount;

            CinemachinePostProcessing.sVolumes.Clear();
            int num = 0;

            while (gameObject == null && num < childCount)
            {
                GameObject gameObject2 = transform.GetChild(num).gameObject;
                if (gameObject2.hideFlags == HideFlags.HideAndDontSave)
                {
                    gameObject2.GetComponents <PostProcessVolume>(CinemachinePostProcessing.sVolumes);
                    if (CinemachinePostProcessing.sVolumes.Count > 0)
                    {
                        gameObject = gameObject2;
                    }
                }
                num++;
            }
            if (minVolumes > 0)
            {
                if (gameObject == null)
                {
                    gameObject                  = new GameObject(CinemachinePostProcessing.sVolumeOwnerName);
                    gameObject.hideFlags        = HideFlags.HideAndDontSave;
                    gameObject.transform.parent = transform;
                }
                int value = ppLayer.volumeLayer.value;
                for (int i = 0; i < 32; i++)
                {
                    if ((value & 1 << i) != 0)
                    {
                        gameObject.layer = i;
                        break;
                    }
                }
                while (CinemachinePostProcessing.sVolumes.Count < minVolumes)
                {
                    CinemachinePostProcessing.sVolumes.Add(gameObject.AddComponent <PostProcessVolume>());
                }
            }
            return(CinemachinePostProcessing.sVolumes);
        }
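The loop over the 32 layer bits above simply assigns the hidden volume-owner object to the first layer contained in the PostProcessLayer's volumeLayer mask. The same idea as a small stand-alone helper (hypothetical name):

using UnityEngine;

public static class LayerMaskUtil
{
    // Returns the index of the first layer set in the mask, or -1 if the mask is empty.
    public static int FirstLayerInMask(LayerMask mask)
    {
        int value = mask.value;
        for (int i = 0; i < 32; i++)
        {
            if ((value & (1 << i)) != 0)
            {
                return i;
            }
        }
        return -1;
    }
}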
Esempio n. 23
    protected override void Render(ScriptableRenderContext context, Camera[] cameras)
    {
        BeginFrameRendering(context, cameras);

        foreach (Camera camera in cameras)
        {
            BeginCameraRendering(context, camera);

            //Culling
            ScriptableCullingParameters cullingParams;
            if (!camera.TryGetCullingParameters(out cullingParams))
            {
                continue;
            }
            CullingResults cull = context.Cull(ref cullingParams);

            //Camera setup some builtin variables e.g. camera projection matrices etc
            context.SetupCameraProperties(camera);

            //Read the clear settings from the camera component
            bool drawSkyBox = camera.clearFlags == CameraClearFlags.Skybox;
            bool clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
            bool clearColor = camera.clearFlags == CameraClearFlags.Color;

            //************************** Set TempRT ************************************

            CommandBuffer cmdTempId = new CommandBuffer();
            cmdTempId.name = "(" + camera.name + ")" + "Setup TempRT";

            //Color
            m_ColorFormatActive = camera.allowHDR ? m_ColorFormatHDR : m_ColorFormat;
            RenderTextureDescriptor colorRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            colorRTDesc.graphicsFormat  = m_ColorFormatActive;
            colorRTDesc.depthBufferBits = depthBufferBits;
            //colorRTDesc.sRGB = ;
            colorRTDesc.msaaSamples       = camera.allowMSAA ? QualitySettings.antiAliasing : 1;
            colorRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_ColorRTid, colorRTDesc, FilterMode.Bilinear);

            //Depth
            RenderTextureDescriptor depthRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            depthRTDesc.colorFormat     = RenderTextureFormat.Depth;
            depthRTDesc.depthBufferBits = depthBufferBits;
            cmdTempId.GetTemporaryRT(m_DepthRTid, depthRTDesc, FilterMode.Bilinear);

            //MotionVector
            RenderTextureDescriptor motionvectorRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            motionvectorRTDesc.graphicsFormat  = UnityEngine.Experimental.Rendering.GraphicsFormat.R16G16_SFloat;
            motionvectorRTDesc.depthBufferBits = depthBufferBits;
            //colorRTDesc.sRGB = ;
            motionvectorRTDesc.msaaSamples       = 1;
            motionvectorRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_MotionVectorRTid, motionvectorRTDesc, FilterMode.Bilinear);

            context.ExecuteCommandBuffer(cmdTempId);

            cmdTempId.Release();

            //************************** Setup DrawSettings and FilterSettings ************************************

            camera.depthTextureMode |= DepthTextureMode.MotionVectors | DepthTextureMode.Depth;

            var sortingSettings = new SortingSettings(camera);

            DrawingSettings   drawSettings   = new DrawingSettings(m_PassName, sortingSettings);
            FilteringSettings filterSettings = new FilteringSettings(RenderQueueRange.all);

            DrawingSettings drawSettingsMotionVector = new DrawingSettings(m_PassName, sortingSettings)
            {
                perObjectData             = PerObjectData.MotionVectors,
                overrideMaterial          = motionVectorMaterial,
                overrideMaterialPassIndex = 0
            };
            FilteringSettings filterSettingsMotionVector = new FilteringSettings(RenderQueueRange.all)
            {
                excludeMotionVectorObjects = false
            };

            DrawingSettings drawSettingsDepth = new DrawingSettings(m_PassName, sortingSettings)
            {
                //perObjectData = PerObjectData.None,
                overrideMaterial          = depthOnlyMaterial,
                overrideMaterialPassIndex = 0,
            };
            FilteringSettings filterSettingsDepth = new FilteringSettings(RenderQueueRange.all);

            //************************** Rendering depth ************************************

            //Set RenderTarget & Camera clear flag
            CommandBuffer cmdDepth = new CommandBuffer();
            cmdDepth.name = "(" + camera.name + ")" + "Depth Clear Flag";
            cmdDepth.SetRenderTarget(m_DepthRT); //Set CameraTarget to the depth texture
            cmdDepth.ClearRenderTarget(true, true, Color.black);
            context.ExecuteCommandBuffer(cmdDepth);
            cmdDepth.Release();

            //Opaque objects
            sortingSettings.criteria             = SortingCriteria.CommonOpaque;
            drawSettingsDepth.sortingSettings    = sortingSettings;
            filterSettingsDepth.renderQueueRange = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettingsDepth, ref filterSettingsDepth);

            //Make _CameraDepthTexture available to shaders
            CommandBuffer cmdDepthTexture = new CommandBuffer();
            cmdDepthTexture.name = "(" + camera.name + ")" + "Depth Texture";
            cmdDepthTexture.SetGlobalTexture(m_DepthRTid, m_DepthRT);
            context.ExecuteCommandBuffer(cmdDepthTexture);
            cmdDepthTexture.Release();

            //************************** Rendering motion vectors ************************************

            //Camera clear flag
            CommandBuffer cmdMotionvector = new CommandBuffer();
            cmdMotionvector.SetRenderTarget(m_MotionVectorRT); //Set CameraTarget to the motion vector texture
            cmdMotionvector.ClearRenderTarget(true, true, Color.black);
            context.ExecuteCommandBuffer(cmdMotionvector);
            cmdMotionvector.Release();

            //Opaque objects
            sortingSettings.criteria = SortingCriteria.CommonOpaque;
            drawSettingsMotionVector.sortingSettings    = sortingSettings;
            filterSettingsMotionVector.renderQueueRange = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettingsMotionVector, ref filterSettingsMotionVector);

            //Camera motion vector
            CommandBuffer cmdCameraMotionVector = new CommandBuffer();
            cmdCameraMotionVector.name = "(" + camera.name + ")" + "Camera MotionVector";
            _NonJitteredVP             = camera.nonJitteredProjectionMatrix * camera.worldToCameraMatrix;
            cmdCameraMotionVector.SetGlobalMatrix("_CamPrevViewProjMatrix", _PreviousVP);
            cmdCameraMotionVector.SetGlobalMatrix("_CamNonJitteredViewProjMatrix", _NonJitteredVP);
            cmdCameraMotionVector.SetViewProjectionMatrices(Matrix4x4.identity, Matrix4x4.identity);
            cmdCameraMotionVector.DrawMesh(fullscreenMesh, Matrix4x4.identity, motionVectorMaterial, 0, 1, null);  // draw a full-screen quad to generate the camera motion vectors
            cmdCameraMotionVector.SetViewProjectionMatrices(camera.worldToCameraMatrix, camera.projectionMatrix);
            context.ExecuteCommandBuffer(cmdCameraMotionVector);
            cmdCameraMotionVector.Release();

            //Make the motion vector texture available to shaders
            CommandBuffer cmdMotionVectorTexture = new CommandBuffer();
            cmdMotionVectorTexture.name = "(" + camera.name + ")" + "MotionVector Texture";
            cmdMotionVectorTexture.SetGlobalTexture(m_MotionVectorRTid, m_MotionVectorRT);
            context.ExecuteCommandBuffer(cmdMotionVectorTexture);
            cmdMotionVectorTexture.Release();

            //************************** Rendering color ************************************

            //Camera clear flag
            CommandBuffer cmd = new CommandBuffer();
            cmd.SetRenderTarget(m_ColorRT); //Set CameraTarget to the color texture
            cmd.ClearRenderTarget(clearDepth, clearColor, camera.backgroundColor);
            context.ExecuteCommandBuffer(cmd);
            cmd.Release();

            //Skybox
            if (drawSkyBox)
            {
                context.DrawSkybox(camera);
            }

            //Opaque objects
            sortingSettings.criteria        = SortingCriteria.CommonOpaque;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.opaque;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);

            //************************** SetUp Post-processing ************************************

            PostProcessLayer m_CameraPostProcessLayer = camera.GetComponent <PostProcessLayer>();
            bool             hasPostProcessing        = m_CameraPostProcessLayer != null;
            bool             usePostProcessing        = false;
            //bool hasOpaqueOnlyEffects = false;
            PostProcessRenderContext m_PostProcessRenderContext = null;
            if (hasPostProcessing)
            {
                m_PostProcessRenderContext = new PostProcessRenderContext();
                usePostProcessing          = m_CameraPostProcessLayer.enabled;
                //hasOpaqueOnlyEffects = m_CameraPostProcessLayer.HasOpaqueOnlyEffects(m_PostProcessRenderContext);
            }

            //************************** Opaque Post-processing ************************************
            //Ambient Occlusion and Screen-space Reflection are generally not supported in custom SRPs,
            //so this part is only for custom opaque post-processing
            // if(usePostProcessing)
            // {
            //     CommandBuffer cmdpp = new CommandBuffer();
            //     cmdpp.name = "("+camera.name+")"+ "Post-processing Opaque";

            //     m_PostProcessRenderContext.Reset();
            //     m_PostProcessRenderContext.camera = camera;
            //     m_PostProcessRenderContext.source = m_ColorRT;
            //     m_PostProcessRenderContext.sourceFormat = UnityEngine.Experimental.Rendering.GraphicsFormatUtility.GetRenderTextureFormat(m_ColorFormatActive);
            //     m_PostProcessRenderContext.destination = m_ColorRT;
            //     m_PostProcessRenderContext.command = cmdpp;
            //     m_PostProcessRenderContext.flip = camera.targetTexture == null;
            //     m_CameraPostProcessLayer.RenderOpaqueOnly(m_PostProcessRenderContext);

            //     context.ExecuteCommandBuffer(cmdpp);
            //     cmdpp.Release();
            // }

            //************************** Rendering Transparent Objects ************************************

            sortingSettings.criteria        = SortingCriteria.CommonTransparent;
            drawSettings.sortingSettings    = sortingSettings;
            filterSettings.renderQueueRange = RenderQueueRange.transparent;
            context.DrawRenderers(cull, ref drawSettings, ref filterSettings);

            //************************** Transparent Post-processing ************************************
            //Bloom, Vignette, Grain, ColorGrading, LensDistortion, Chromatic Aberration, Auto Exposure, Motion Blur
            if (usePostProcessing)
            {
                CommandBuffer cmdpp = new CommandBuffer();
                cmdpp.name = "(" + camera.name + ")" + "Post-processing Transparent";

                m_PostProcessRenderContext.Reset();
                m_PostProcessRenderContext.camera       = camera;
                m_PostProcessRenderContext.source       = m_ColorRT;
                m_PostProcessRenderContext.sourceFormat = UnityEngine.Experimental.Rendering.GraphicsFormatUtility.GetRenderTextureFormat(m_ColorFormatActive);
                m_PostProcessRenderContext.destination  = BuiltinRenderTextureType.CameraTarget;
                m_PostProcessRenderContext.command      = cmdpp;
                m_PostProcessRenderContext.flip         = camera.targetTexture == null;
                m_CameraPostProcessLayer.Render(m_PostProcessRenderContext);

                context.ExecuteCommandBuffer(cmdpp);
                cmdpp.Release();
            }
            else
            {
                //Make sure the camera target still receives the color buffer when post-processing is off
                CommandBuffer cmdBlitToCam = new CommandBuffer();
                cmdBlitToCam.name = "(" + camera.name + ")" + "Blit back to Camera";
                cmdBlitToCam.Blit(m_ColorRTid, BuiltinRenderTextureType.CameraTarget);
                context.ExecuteCommandBuffer(cmdBlitToCam);
                cmdBlitToCam.Release();
            }

            //************************** Debug ************************************

            if (_motionVectorDebug)
            {
                CommandBuffer cmdDebug = new CommandBuffer();
                RenderTexture rt;
                rt = new RenderTexture(colorRTDesc);
                cmdDebug.Blit(BuiltinRenderTextureType.CameraTarget, BuiltinRenderTextureType.CameraTarget, motionVectorDebugMaterial);
                cmdDebug.Blit(BuiltinRenderTextureType.CameraTarget, rt, motionVectorDebugMaterial);
                //MyDebug(camera, context, rt, colorRTDesc, cmdDebug);
                context.ExecuteCommandBuffer(cmdDebug);
                cmdDebug.Release();
                rt.Release();
            }

            //************************** Clean Up ************************************

            CommandBuffer cmdclean = new CommandBuffer();
            cmdclean.name = "(" + camera.name + ")" + "Clean Up";
            cmdclean.ReleaseTemporaryRT(m_ColorRTid);
            cmdclean.ReleaseTemporaryRT(m_DepthRTid);
            cmdclean.ReleaseTemporaryRT(m_MotionVectorRTid);
            context.ExecuteCommandBuffer(cmdclean);
            cmdclean.Release();

            context.Submit();

            //For camera motion vector
            _PreviousVP = _NonJitteredVP;

            EndCameraRendering(context, camera);
        }

        EndFrameRendering(context, cameras);
    }
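Note that _PreviousVP above is a single field shared by every camera in the loop, which is fine for one camera but mixes matrices up as soon as several cameras render motion vectors. One way to keep the history per camera is sketched below (class and method names are hypothetical):

using System.Collections.Generic;
using UnityEngine;

public static class CameraMatrixHistory
{
    // One previous non-jittered view-projection matrix per camera.
    private static readonly Dictionary<Camera, Matrix4x4> previousVP =
        new Dictionary<Camera, Matrix4x4>();

    // Returns the previous VP for this camera and stores the current one for next frame.
    public static Matrix4x4 Exchange(Camera camera, Matrix4x4 currentNonJitteredVP)
    {
        Matrix4x4 previous;
        if (!previousVP.TryGetValue(camera, out previous))
        {
            previous = currentNonJitteredVP; // First frame: no camera motion.
        }
        previousVP[camera] = currentNonJitteredVP;
        return previous;
    }
}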
Esempio n. 24
    //Starts Rendering Part
    public static void Render(ScriptableRenderContext context, IEnumerable <Camera> cameras)
    {
        //For shadowmapping, the matrices from the light's point of view
        Matrix4x4 view             = Matrix4x4.identity;
        Matrix4x4 proj             = Matrix4x4.identity;
        bool      successShadowMap = false;

        foreach (Camera camera in cameras)
        {
            bool isSceneViewCam = camera.cameraType == CameraType.SceneView;
            //************************** UGUI Geometry on scene view *************************
            #if UNITY_EDITOR
            if (isSceneViewCam)
            {
                ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
            }
            #endif

            //************************** Culling ****************************************
            ScriptableCullingParameters cullingParams;
            if (!CullResults.GetCullingParameters(camera, out cullingParams))
            {
                continue;
            }
            CullResults cull = new CullResults();
            CullResults.Cull(ref cullingParams, context, ref cull);

            //************************** Lighting Variables *****************************
            CommandBuffer cmdLighting = new CommandBuffer();
            cmdLighting.name = "(" + camera.name + ")" + "Lighting variable";
            int   mainLightIndex = -1;
            Light mainLight      = null;

            Vector4[] lightPositions = new Vector4[8];
            Vector4[] lightColors    = new Vector4[8];
            Vector4[] lightAttn      = new Vector4[8];
            Vector4[] lightSpotDir   = new Vector4[8];

            //Initialise values
            for (int i = 0; i < 8; i++)
            {
                lightPositions[i] = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
                lightColors[i]    = Color.black;
                lightAttn[i]      = new Vector4(0.0f, 1.0f, 0.0f, 1.0f);
                lightSpotDir[i]   = new Vector4(0.0f, 0.0f, 1.0f, 0.0f);
            }

            for (int i = 0; i < cull.visibleLights.Count; i++)
            {
                VisibleLight light = cull.visibleLights[i];

                if (mainLightIndex == -1) //Directional light
                {
                    if (light.lightType == LightType.Directional)
                    {
                        Vector4 dir = light.localToWorld.GetColumn(2);
                        lightPositions[0] = new Vector4(-dir.x, -dir.y, -dir.z, 0);
                        lightColors[0]    = light.light.color;

                        float lightRangeSqr                 = light.range * light.range;
                        float fadeStartDistanceSqr          = 0.8f * 0.8f * lightRangeSqr;
                        float fadeRangeSqr                  = (fadeStartDistanceSqr - lightRangeSqr);
                        float oneOverFadeRangeSqr           = 1.0f / fadeRangeSqr;
                        float lightRangeSqrOverFadeRangeSqr = -lightRangeSqr / fadeRangeSqr;
                        float quadAtten = 25.0f / lightRangeSqr;
                        lightAttn[0] = new Vector4(quadAtten, oneOverFadeRangeSqr, lightRangeSqrOverFadeRangeSqr, 1.0f);

                        cmdLighting.SetGlobalVector("_LightColor0", lightColors[0]);
                        cmdLighting.SetGlobalVector("_WorldSpaceLightPos0", lightPositions[0]);

                        mainLight      = light.light;
                        mainLightIndex = i;
                    }
                }
                else
                {
                    continue; //so far only one directional light is supported
                }
            }

            cmdLighting.SetGlobalVectorArray("unity_LightPosition", lightPositions);
            cmdLighting.SetGlobalVectorArray("unity_LightColor", lightColors);
            cmdLighting.SetGlobalVectorArray("unity_LightAtten", lightAttn);
            cmdLighting.SetGlobalVectorArray("unity_SpotDirection", lightSpotDir);

            context.ExecuteCommandBuffer(cmdLighting);
            cmdLighting.Release();

            //************************** Draw Settings ************************************
            FilterRenderersSettings filterSettings = new FilterRenderersSettings(true);

            DrawRendererSettings drawSettingsDefault = new DrawRendererSettings(camera, passNameDefault);
            drawSettingsDefault.rendererConfiguration = renderConfig;
            drawSettingsDefault.flags = DrawRendererFlags.EnableDynamicBatching;
            drawSettingsDefault.SetShaderPassName(5, m_UnlitPassName);

            DrawRendererSettings drawSettingsBase = new DrawRendererSettings(camera, passNameBase);
            drawSettingsBase.flags = DrawRendererFlags.EnableDynamicBatching;
            drawSettingsBase.rendererConfiguration = renderConfig;

            DrawRendererSettings drawSettingsAdd = new DrawRendererSettings(camera, passNameAdd);
            drawSettingsAdd.flags = DrawRendererFlags.EnableDynamicBatching;
            drawSettingsAdd.rendererConfiguration = renderConfig;

            DrawRendererSettings drawSettingsDepth = new DrawRendererSettings(camera, passNameShadow);
            drawSettingsDepth.flags = DrawRendererFlags.EnableDynamicBatching;
            //drawSettingsBase.rendererConfiguration = renderConfig;

            //************************** Set TempRT ************************************
            CommandBuffer cmdTempId = new CommandBuffer();
            cmdTempId.name = "(" + camera.name + ")" + "Setup TempRT";

            //Depth
            RenderTextureDescriptor depthRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            depthRTDesc.colorFormat     = m_DepthFormat;
            depthRTDesc.depthBufferBits = depthBufferBits;
            cmdTempId.GetTemporaryRT(m_DepthRTid, depthRTDesc, FilterMode.Bilinear);

            //Color
            RenderTextureDescriptor colorRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            colorRTDesc.colorFormat       = m_ColorFormat;
            colorRTDesc.depthBufferBits   = depthBufferBits; //have depth because we don't want to ruin the _CameraDepthTexture
            colorRTDesc.sRGB              = true;
            colorRTDesc.msaaSamples       = 1;
            colorRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_ColorRTid, colorRTDesc, FilterMode.Bilinear);

            //Shadow
            RenderTextureDescriptor shadowRTDesc = new RenderTextureDescriptor(m_ShadowRes, m_ShadowRes);
            shadowRTDesc.colorFormat       = m_ShadowFormat;
            shadowRTDesc.depthBufferBits   = depthBufferBits; //have depth because it is also a depth texture
            shadowRTDesc.msaaSamples       = 1;
            shadowRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_ShadowMapLightid, shadowRTDesc, FilterMode.Bilinear);//depth per light

            //ScreenSpaceShadowMap
            RenderTextureDescriptor shadowMapRTDesc = new RenderTextureDescriptor(camera.pixelWidth, camera.pixelHeight);
            shadowMapRTDesc.colorFormat       = m_ShadowMapFormat;
            shadowMapRTDesc.depthBufferBits   = 0;
            shadowMapRTDesc.msaaSamples       = 1;
            shadowMapRTDesc.enableRandomWrite = false;
            cmdTempId.GetTemporaryRT(m_ShadowMapid, shadowMapRTDesc, FilterMode.Bilinear);//screen space shadow

            context.ExecuteCommandBuffer(cmdTempId);
            cmdTempId.Release();

            //************************** Do shadow? ************************************
            Bounds bounds;
            bool   doShadow = cull.GetShadowCasterBounds(mainLightIndex, out bounds);

            //************************** Shadow Mapping ************************************
            if (doShadow && !isSceneViewCam)
            {
                DrawShadowsSettings shadowSettings = new DrawShadowsSettings(cull, mainLightIndex);

                successShadowMap = cull.ComputeDirectionalShadowMatricesAndCullingPrimitives(mainLightIndex,
                                                                                             0, 1, new Vector3(1, 0, 0),
                                                                                             m_ShadowRes, mainLight.shadowNearPlane, out view, out proj,
                                                                                             out shadowSettings.splitData);

                CommandBuffer cmdShadow = new CommandBuffer();
                cmdShadow.name = "(" + camera.name + ")" + "Shadow Mapping";

                cmdShadow.SetRenderTarget(m_ShadowMapLight);
                cmdShadow.ClearRenderTarget(true, true, Color.black);

                //Change the view to light's point of view
                cmdShadow.SetViewport(new Rect(0, 0, m_ShadowRes, m_ShadowRes));
                cmdShadow.EnableScissorRect(new Rect(4, 4, m_ShadowRes - 8, m_ShadowRes - 8));
                cmdShadow.SetViewProjectionMatrices(view, proj);

                context.ExecuteCommandBuffer(cmdShadow);
                cmdShadow.Clear();

                //Render Shadowmap
                context.DrawShadows(ref shadowSettings);

                cmdShadow.DisableScissorRect();
                cmdShadow.SetGlobalTexture(m_ShadowMapLightid, m_ShadowMapLight);
                context.ExecuteCommandBuffer(cmdShadow);
                cmdShadow.Clear();
                cmdShadow.Release();
            }

            //************************** Camera Parameters ************************************
            context.SetupCameraProperties(camera);

            //************************** Depth (for CameraDepthTexture) ************************************
            CommandBuffer cmdDepthOpaque = new CommandBuffer();
            cmdDepthOpaque.name = "(" + camera.name + ")" + "Make CameraDepthTexture";

            cmdDepthOpaque.SetRenderTarget(m_DepthRT);
            cmdDepthOpaque.ClearRenderTarget(true, true, Color.black);
            context.ExecuteCommandBuffer(cmdDepthOpaque);
            cmdDepthOpaque.Clear();

            filterSettings.renderQueueRange = RenderQueueRange.opaque;
            drawSettingsDepth.sorting.flags = SortFlags.CommonOpaque;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsDepth, filterSettings);

            cmdDepthOpaque.SetGlobalTexture(m_DepthRTid, m_DepthRT);
            context.ExecuteCommandBuffer(cmdDepthOpaque);
            cmdDepthOpaque.Release();

            //************************** Screen Space Shadow ************************************
            if (doShadow)
            {
                CommandBuffer cmdShadow2 = new CommandBuffer();
                cmdShadow2.name = "(" + camera.name + ")" + "Screen Space Shadow";

                //Bias
                if (mainLight != null)
                {
                    float sign = (SystemInfo.usesReversedZBuffer) ? 1.0f : -1.0f;
                    if (isSceneViewCam)
                    {
                        sign = -sign * 0.01f;
                    }
                    float bias = mainLight.shadowBias * proj.m22 * sign;

                    cmdShadow2.SetGlobalFloat("_ShadowBias", bias);
                }

                //Shadow Transform
                if (successShadowMap)
                {
                    cmdShadow2.EnableShaderKeyword("SHADOWS_SCREEN");
                    cmdShadow2.EnableShaderKeyword("LIGHTMAP_SHADOW_MIXING");

                    if (SystemInfo.usesReversedZBuffer)
                    {
                        proj.m20 = -proj.m20;
                        proj.m21 = -proj.m21;
                        proj.m22 = -proj.m22;
                        proj.m23 = -proj.m23;
                    }

                    Matrix4x4 WorldToShadow = proj * view;

                    float f = 0.5f;

                    var textureScaleAndBias = Matrix4x4.identity;
                    textureScaleAndBias.m00 = f;
                    textureScaleAndBias.m11 = f;
                    textureScaleAndBias.m22 = f;
                    textureScaleAndBias.m03 = f;
                    textureScaleAndBias.m23 = f;
                    textureScaleAndBias.m13 = f;

                    WorldToShadow = textureScaleAndBias * WorldToShadow;

                    cmdShadow2.SetGlobalMatrix("_WorldToShadow", WorldToShadow);
                    cmdShadow2.SetGlobalFloat("_ShadowStrength", mainLight.shadowStrength);
                }

                //Render the screen-space shadow
                cmdShadow2.Blit(m_ShadowMap, m_ShadowMap, m_ScreenSpaceShadowsMaterial);
                cmdShadow2.SetGlobalTexture(m_ShadowMapid, m_ShadowMap);

                context.ExecuteCommandBuffer(cmdShadow2);
                cmdShadow2.Release();
            }

            //************************** Clear ************************************
            CommandBuffer cmd = new CommandBuffer();
            cmd.name = "(" + camera.name + ")" + "Clear Flag";

            cmd.SetRenderTarget(m_ColorRT);
            ClearFlag(cmd, camera, camera.backgroundColor);

            context.ExecuteCommandBuffer(cmd);
            cmd.Release();

            //************************** Skybox ************************************
            if (camera.clearFlags == CameraClearFlags.Skybox)
            {
                context.DrawSkybox(camera);
            }

            //************************** Opaque ************************************
            filterSettings.renderQueueRange = RenderQueueRange.opaque;

            // DEFAULT pass, draw shaders without a pass name
            drawSettingsDefault.sorting.flags = SortFlags.CommonOpaque;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsDefault, filterSettings);

            // BASE pass
            drawSettingsBase.sorting.flags = SortFlags.CommonOpaque;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsBase, filterSettings);

            // ADD pass
            drawSettingsAdd.sorting.flags = SortFlags.CommonOpaque;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsAdd, filterSettings);

            //************************** Blit to Camera Target ************************************
            // so that reflection probes will work + screen view buttons
            CommandBuffer cmdColorOpaque = new CommandBuffer();
            cmdColorOpaque.name = "(" + camera.name + ")" + "After opaque";

            //This blit is necessary on Windows: it makes sure the depth is correct for transparent objects
            cmdColorOpaque.Blit(m_ColorRT, BuiltinRenderTextureType.CameraTarget);
            cmdColorOpaque.SetRenderTarget(m_ColorRT);

            //"Grab" pass
            cmdColorOpaque.SetGlobalTexture(m_GrabOpaqueRTid, m_ColorRT);

            context.ExecuteCommandBuffer(cmdColorOpaque);
            cmdColorOpaque.Release();

            //************************** Transparent ************************************
            filterSettings.renderQueueRange = RenderQueueRange.transparent;

            // DEFAULT pass
            drawSettingsDefault.sorting.flags = SortFlags.CommonTransparent;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsDefault, filterSettings);

            // BASE pass
            drawSettingsBase.sorting.flags = SortFlags.CommonTransparent;
            context.DrawRenderers(cull.visibleRenderers, ref drawSettingsBase, filterSettings);

            //************************** Blit to Camera Target ************************************
            // so that reflection probes will work + screen view buttons
            CommandBuffer cmdColor = new CommandBuffer();
            cmdColor.name = "(" + camera.name + ")" + "After transparent";
            cmdColor.Blit(m_ColorRT, BuiltinRenderTextureType.CameraTarget);
            cmdColor.SetRenderTarget(m_ColorRT);
            context.ExecuteCommandBuffer(cmdColor);
            cmdColor.Release();

            //************************** Post-processing ************************************
            m_CameraPostProcessLayer = camera.GetComponent <PostProcessLayer>();
            if (m_CameraPostProcessLayer != null && m_CameraPostProcessLayer.enabled)
            {
                CommandBuffer cmdpp = new CommandBuffer();
                cmdpp.name = "(" + camera.name + ")" + "Post-processing";

                m_PostProcessRenderContext.Reset();
                m_PostProcessRenderContext.camera       = camera;
                m_PostProcessRenderContext.source       = m_ColorRT;
                m_PostProcessRenderContext.sourceFormat = m_ColorFormat;
                m_PostProcessRenderContext.destination  = BuiltinRenderTextureType.CameraTarget;
                m_PostProcessRenderContext.command      = cmdpp;
                m_PostProcessRenderContext.flip         = camera.targetTexture == null;
                m_CameraPostProcessLayer.Render(m_PostProcessRenderContext);

                //Target is already CameraTarget

                context.ExecuteCommandBuffer(cmdpp);
                cmdpp.Release();
            }

            //************************** Scene View Fix ************************************
            #if UNITY_EDITOR
            if (isSceneViewCam)     //Copy depth to backbuffer's depth buffer
            {
                CommandBuffer cmdSceneDepth = new CommandBuffer();
                cmdSceneDepth.name = "(" + camera.name + ")" + "Copy Depth to CameraTarget";
                cmdSceneDepth.Blit(m_DepthRT, BuiltinRenderTextureType.CameraTarget, m_CopyDepthMaterial);
                context.ExecuteCommandBuffer(cmdSceneDepth);
                cmdSceneDepth.Release();
            }
            #endif

            //************************** Clean Up ************************************
            CommandBuffer cmdclean = new CommandBuffer();
            cmdclean.name = "(" + camera.name + ")" + "Clean Up";
            cmdclean.ReleaseTemporaryRT(m_ColorRTid);
            cmdclean.ReleaseTemporaryRT(m_DepthRTid);
            cmdclean.ReleaseTemporaryRT(m_ShadowMapid);
            cmdclean.ReleaseTemporaryRT(m_ShadowMapLightid);
            context.ExecuteCommandBuffer(cmdclean);
            cmdclean.Release();

            context.Submit();
        }
    }
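ClearFlag is a local helper that is not part of this snippet; presumably it maps the camera's clear flags onto ClearRenderTarget, along the lines of the sketch below (the exact color/skybox handling is an assumption):

using UnityEngine;
using UnityEngine.Rendering;

public static class ClearFlagHelper
{
    // Clears the current render target according to the camera's clear flags.
    public static void ClearFlag(CommandBuffer cmd, Camera camera, Color backgroundColor)
    {
        bool clearDepth = camera.clearFlags != CameraClearFlags.Nothing;
        bool clearColor = camera.clearFlags == CameraClearFlags.Color;
        cmd.ClearRenderTarget(clearDepth, clearColor, backgroundColor);
    }
}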
Esempio n. 25
 public static bool IsTemporalAntialiasingActive(PostProcessLayer layer)
 {
     return(IsPostProcessingActive(layer) &&
            layer.antialiasingMode == PostProcessLayer.Antialiasing.TemporalAntialiasing &&
            layer.temporalAntialiasing.IsSupported());
 }
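IsPostProcessingActive is referenced above but not shown; in the post-processing stack it is essentially a null-and-enabled check, roughly:

 public static bool IsPostProcessingActive(PostProcessLayer layer)
 {
     return layer != null && layer.enabled;
 }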
Esempio n. 26
        /// <summary>
        /// Enables the input profile for the currently rendering scene (menu, ksc, editor, tracking, flight, flight-map)
        /// </summary>
        /// <param name="profileName"></param>
        internal void enableProfile(string profileName)
        {
            currentProfile = null;
            Camera activeCam = getActiveCamera();

            Log.debug("TUFX: enableProfile( " + profileName + " )  scene: ( " + HighLogic.LoadedScene + " ) map: ( " + isMapScene + " ) camera: ( " + activeCam?.name + " )");
            Log.debug(System.Environment.StackTrace);
            if (previousCamera != activeCam)
            {
                Log.log("Detected change of active camera; recreating post-process objects.");
                if (volume != null)
                {
                    Log.log("Destroying existing PostProcessVolume (from previous camera).");
                    Component.DestroyImmediate(layer);
                    UnityEngine.Object.DestroyImmediate(volume.sharedProfile);
                    UnityEngine.Object.DestroyImmediate(volume);
                    layer  = null;
                    volume = null;
                }
                previousScene  = HighLogic.LoadedScene;
                wasMapScene    = isMapScene;
                previousCamera = activeCam;
            }

            Log.debug("Active Camera (hashcode): " + activeCam?.GetHashCode());
            if (activeCam == null)
            {
                Log.log("Active camera was null.  Skipping profile setup for scene: " + HighLogic.LoadedScene);
            }
            else if (!string.IsNullOrEmpty(profileName) && Profiles.ContainsKey(profileName))
            {
                Log.log("Enabling profile: " + profileName + ".  Current GameScene: " + HighLogic.LoadedScene);
                TUFXProfile tufxProfile = Profiles[profileName];
                currentProfile = tufxProfile;
                Log.debug("Profile (hashcode): " + tufxProfile?.GetHashCode() + " :: " + tufxProfile?.ProfileName);
                Log.log("Setting HDR for camera: " + activeCam.name + " to: " + tufxProfile.HDREnabled);
                activeCam.allowHDR = tufxProfile.HDREnabled;
                onAntiAliasingSelected(tufxProfile.AntiAliasing, false);
                layer = activeCam.gameObject.AddOrGetComponent <PostProcessLayer>();
                layer.Init(Resources);
                layer.volumeLayer = ~0;//everything //TODO -- fix layer assignment...
                Log.debug("Layer: " + layer?.GetHashCode());
                volume          = activeCam.gameObject.AddOrGetComponent <PostProcessVolume>();
                volume.isGlobal = true;
                volume.priority = 100;
                Log.debug("Volume: " + volume.GetHashCode());
                if (volume.sharedProfile == null)
                {
                    volume.sharedProfile = tufxProfile.GetPostProcessProfile();
                }
                else
                {
                    volume.sharedProfile.settings.Clear();
                    tufxProfile.Enable(volume);
                }
                Log.log("Profile enabled: " + profileName);
                TUFXScatteringManager.INSTANCE.debugProfileSetup(volume, layer);
            }
            else if (string.IsNullOrEmpty(profileName))
            {
                Log.log("Clearing current profile for scene: " + HighLogic.LoadedScene);
            }
            else
            {
                Log.exception("Profile load was requested for: " + profileName + ", but no profile exists for that name.");
            }
        }
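AddOrGetComponent is not a stock Unity call; it is assumed here to be an extension method that returns an existing component or adds one if missing, roughly:

using UnityEngine;

public static class GameObjectExtensions
{
    // Returns the existing component of type T on the GameObject, adding one if missing.
    public static T AddOrGetComponent<T>(this GameObject go) where T : Component
    {
        T component = go.GetComponent<T>();
        if (component == null)
        {
            component = go.AddComponent<T>();
        }
        return component;
    }
}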
Esempio n. 27
 private void Reset()
 {
     this.PostProcessLayer = GetComponent <PostProcessLayer>();
     this.Wetstuff         = GetComponent <PlaceholderSoftware.WetStuff.WetStuff>();
 }
Esempio n. 28
        public void Update(PostProcessLayer postProcessLayer)
        {
            // If TAA is enabled projMatrix will hold a jittered projection matrix. The original,
            // non-jittered projection matrix can be accessed via nonJitteredProjMatrix.
            bool taaEnabled = camera.cameraType == CameraType.Game &&
                              CoreUtils.IsTemporalAntialiasingActive(postProcessLayer);

            var nonJitteredCameraProj = camera.projectionMatrix;
            var cameraProj            = taaEnabled
                ? postProcessLayer.temporalAntialiasing.GetJitteredProjectionMatrix(camera)
                : nonJitteredCameraProj;

            // The actual projection matrix used in shaders is actually massaged a bit to work across all platforms
            // (different Z value ranges etc.)
            var gpuProj            = GL.GetGPUProjectionMatrix(cameraProj, true); // Had to change this from 'false'
            var gpuView            = camera.worldToCameraMatrix;
            var gpuNonJitteredProj = GL.GetGPUProjectionMatrix(nonJitteredCameraProj, true);

            var pos    = camera.transform.position;
            var relPos = pos; // World-origin-relative

            if (ShaderConfig.s_CameraRelativeRendering != 0)
            {
                // Zero out the translation component.
                gpuView.SetColumn(3, new Vector4(0, 0, 0, 1));
                relPos = Vector3.zero; // Camera-relative
            }

            var gpuVP = gpuNonJitteredProj * gpuView;

            // A camera can be rendered multiple times per frame, so only update the previous view-projection matrix and position when needed
            if (m_LastFrameActive != Time.frameCount)
            {
                if (isFirstFrame)
                {
                    prevCameraPos      = pos;
                    prevViewProjMatrix = gpuVP;
                }
                else
                {
                    prevCameraPos      = cameraPos;
                    prevViewProjMatrix = nonJitteredViewProjMatrix;
                }

                isFirstFrame = false;

                const uint taaFrameCount = 8;
                taaFrameIndex = taaEnabled ? (uint)Time.renderedFrameCount % taaFrameCount : 0;
            }
            else
            {
                // Warning: in the Game View, outside of the Play Mode, the counter gets stuck on a random frame.
                // In this case, reset the frame index to 0.
                taaFrameIndex = 0;
            }

            viewMatrix            = gpuView;
            projMatrix            = gpuProj;
            nonJitteredProjMatrix = gpuNonJitteredProj;
            cameraPos             = pos;
            screenSize            = new Vector4(camera.pixelWidth, camera.pixelHeight, 1.0f / camera.pixelWidth, 1.0f / camera.pixelHeight);

            // Warning: near and far planes appear to be broken.
            GeometryUtility.CalculateFrustumPlanes(viewProjMatrix, frustumPlanes);

            for (int i = 0; i < 4; i++)
            {
                // Left, right, top, bottom.
                frustumPlaneEquations[i] = new Vector4(frustumPlanes[i].normal.x, frustumPlanes[i].normal.y, frustumPlanes[i].normal.z, frustumPlanes[i].distance);
            }

            // Near, far.
            frustumPlaneEquations[4] = new Vector4(camera.transform.forward.x, camera.transform.forward.y, camera.transform.forward.z, -Vector3.Dot(camera.transform.forward, relPos) - camera.nearClipPlane);
            frustumPlaneEquations[5] = new Vector4(-camera.transform.forward.x, -camera.transform.forward.y, -camera.transform.forward.z, Vector3.Dot(camera.transform.forward, relPos) + camera.farClipPlane);

            m_LastFrameActive = Time.frameCount;
        }
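The matrices computed in Update are typically bound as shader constants, with the previous frame's view-projection driving motion vectors and TAA reprojection. A minimal sketch of that binding step using the fields set above; the shader property names are placeholders, not the ones this pipeline actually uses:

        // Minimal sketch: push the per-camera matrices to shaders as globals.
        // Property names are illustrative; the real constant buffer layout differs.
        void PushShaderConstants()
        {
            var viewProj = nonJitteredProjMatrix * viewMatrix;            // current non-jittered VP
            Shader.SetGlobalMatrix("_ViewMatrixSketch", viewMatrix);
            Shader.SetGlobalMatrix("_ViewProjMatrixSketch", viewProj);
            Shader.SetGlobalMatrix("_PrevViewProjMatrixSketch", prevViewProjMatrix);
            Shader.SetGlobalVector("_ScreenSizeSketch", screenSize);
        }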
Esempio n. 29
        // functions
        void OnEnable()
        {
#if UNITY_POST_PROCESSING_STACK_V2
            PostProcessLayer postLayer = GetComponent <PostProcessLayer>();
            if (postLayer != null && postLayer.enabled)
            {
                postProcessCam           = GetComponent <Camera>();
                postProcessCam.hideFlags = HideFlags.HideInInspector;
                var camGO = new GameObject(postProcessCamName);
                camGO.hideFlags = HideFlags.HideAndDontSave;
                camGO.transform.SetParent(transform);
                camGO.transform.localPosition = Vector3.zero;
                camGO.transform.localRotation = Quaternion.identity;
                cam = camGO.AddComponent <Camera>();
                cam.CopyFrom(postProcessCam);
                Debug.Log("set up cam");
            }
            else
#endif
            {
                cam = GetComponent <Camera>();
                //Debug.Log("set up cam"+cam.projectionMatrix);
                cam.hideFlags = HideFlags.HideInInspector;
            }
            lightfieldMat = new Material(Shader.Find("Holoplay/Lightfield"));
            instance      = this;        // most recently enabled Capture set as instance
            // lightfield camera (only does blitting of the quilt into a lightfield)
            var lightfieldCamGO = new GameObject(lightfieldCamName);
            lightfieldCamGO.hideFlags = HideFlags.HideAndDontSave;
            lightfieldCamGO.transform.SetParent(transform);
            var lightfieldPost = lightfieldCamGO.AddComponent <LightfieldPostProcess>();
            lightfieldPost.holoplay = this;
            lightfieldCam           = lightfieldCamGO.AddComponent <Camera>();
#if UNITY_2017_3_OR_NEWER
            lightfieldCam.allowDynamicResolution = false;
#endif
            lightfieldCam.allowHDR    = false;
            lightfieldCam.allowMSAA   = false;
            lightfieldCam.cullingMask = 0;
            lightfieldCam.clearFlags  = CameraClearFlags.Nothing;

#if !UNITY_EDITOR
            if (instance == null)
            {
                Debug.Log("empty when running");
            }
#endif
            ReloadCalibration();

            // // load calibration
            // if (!loadResults.calibrationFound)
            //  Debug.Log("[HoloPlay] Attempting to load calibration, but none found!");
            // if (!loadResults.lkgDisplayFound)
            //  Debug.Log("[HoloPlay] No LKG display detected");

            // setup the window to play on the looking glass
            Screen.SetResolution(cal.screenWidth, cal.screenHeight, true);
#if UNITY_2019_3_OR_NEWER
            if (!Application.isEditor && targetDisplay == 0)
            {
#if UNITY_STANDALONE_OSX
                targetDisplay = PluginCore.GetLKGunityIndex(targetLKG);
                lightfieldCam.targetDisplay = targetDisplay;
                Display.displays[targetDisplay].Activate();
#else
                Display.displays[targetDisplay].Activate(0, 0, 0);

#if UNITY_STANDALONE_WIN
                Display.displays[targetDisplay].SetParams(
                    cal.screenWidth, cal.screenHeight,
                    cal.xpos, cal.ypos
                    );
                // Debug.LogFormat("{0}, {1}, {2}, {3}", cal.screenWidth, cal.screenHeight,
                //     cal.xpos, cal.ypos);
#endif
#endif
            }
#endif

            // setup the quilt
            SetupQuilt();

            // call initialization event
            if (onHoloplayReady != null)
            {
                onHoloplayReady.Invoke(loadResults);
            }
        }
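The OnEnable above creates hidden camera objects and a material, so a matching OnDisable is where they would normally be torn down. A minimal sketch of that cleanup, assuming the fields shown above (cam, postProcessCam, lightfieldCam, lightfieldMat) and that nothing else owns these objects:

        // Minimal sketch: tear down what OnEnable created. DestroyImmediate is used so the
        // cleanup also works in edit mode; assumes no other code holds onto these objects.
        void OnDisable()
        {
            if (lightfieldCam != null)
                DestroyImmediate(lightfieldCam.gameObject);   // hidden lightfield camera + post component
            if (lightfieldMat != null)
                DestroyImmediate(lightfieldMat);              // material created in OnEnable
#if UNITY_POST_PROCESSING_STACK_V2
            if (postProcessCam != null && cam != null && cam != postProcessCam)
            {
                postProcessCam.hideFlags = HideFlags.None;    // un-hide the original camera
                DestroyImmediate(cam.gameObject);             // remove the hidden camera clone
                return;
            }
#endif
            if (cam != null)
                cam.hideFlags = HideFlags.None;               // un-hide the camera on this object
        }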
 void Awake()
 {
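     // Cache the night-vision effect and the post-processing layer from the target camera.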
     myLight = NightVision_Camera.GetComponent <DeferredNightVisionEffect>();
     PPlayer = NightVision_Camera.GetComponent <PostProcessLayer>();
 }
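A toggle is the usual counterpart to this caching step. A minimal sketch, assuming DeferredNightVisionEffect derives from Behaviour so that setting enabled turns the effect on and off:

 // Minimal sketch: flip the cached night-vision effect on or off.
 // Assumes DeferredNightVisionEffect is a Behaviour, so 'enabled' controls it.
 public void SetNightVision(bool on)
 {
     if (myLight != null)
     {
         myLight.enabled = on;
     }
 }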