        /// <summary>
        /// Renders a probe into the target texture.
        /// </summary>
        /// <param name="settings">The probe settings to use.</param>
        /// <param name="position">The probe position to use.</param>
        /// <param name="target">The target texture.</param>
        /// <param name="cameraSettings">The camera settings used during the rendering.</param>
        /// <param name="cameraPositionSettings">The camera position settings used during the rendering.</param>
        /// <param name="forceFlipY">Whether to force the Y axis flipping.</param>
        /// <param name="forceInvertBackfaceCulling">Whether to force the backface culling inversion.</param>
        /// <param name="staticFlags">The static flags to use during the rendering.</param>
        /// <param name="referenceFieldOfView">The reference field of view.</param>
        /// <param name="referenceAspect">The reference aspect ratio.</param>
        public static void Render(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            Texture target,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            bool forceFlipY = false,
            bool forceInvertBackfaceCulling = false,
            uint staticFlags           = 0,
            float referenceFieldOfView = 90,
            float referenceAspect      = 1
            )
        {
            // Copy settings
            ComputeCameraSettingsFromProbeSettings(
                settings, position,
                out cameraSettings, out cameraPositionSettings, 0,
                referenceFieldOfView, referenceAspect
                );

            if (forceFlipY)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }
            if (forceInvertBackfaceCulling)
            {
                cameraSettings.invertFaceCulling = true;
            }

            // Perform rendering
            Render(cameraSettings, cameraPositionSettings, target, staticFlags);
        }
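        // Usage sketch (not part of the source above): capture a reflection probe into a cube
        // RenderTexture with the Render overload shown above. Assumes, as in recent HDRP versions,
        // that this method lives on HDRenderUtilities, that HDProbe exposes a `settings` property,
        // and that ProbeCapturePositionSettings.ComputeFrom(probe, null) is available; treat those
        // names as assumptions, not guarantees.
        static RenderTexture CaptureReflectionProbeSketch(HDProbe probe, int size)
        {
            var target = new RenderTexture(size, size, 24, RenderTextureFormat.ARGBHalf)
            {
                dimension = UnityEngine.Rendering.TextureDimension.Cube
            };
            target.Create();

            // Null reference transform: capture from the probe's own transform.
            var position = ProbeCapturePositionSettings.ComputeFrom(probe, null);
            HDRenderUtilities.Render(probe.settings, position, target, out _, out _);
            return target;
        }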
        internal static void ApplyObliqueNearClipPlane(
            ref ProbeSettings settings,                             // In Parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition               // InOut parameter
            )
        {
            var proxyMatrix    = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);
            var mirrorForward  = proxyMatrix.MultiplyVector(settings.proxySettings.mirrorRotationProxySpace * Vector3.forward);

            var clipPlaneCameraSpace = GeometryUtils.CameraSpacePlane(
                cameraPosition.worldToCameraMatrix,
                mirrorPosition,
                mirrorForward
                );

            var sourceProjection = Matrix4x4.Perspective(
                HDUtils.ClampFOV(cameraSettings.frustum.fieldOfView),
                cameraSettings.frustum.aspect,
                cameraSettings.frustum.nearClipPlane,
                cameraSettings.frustum.farClipPlane
                );
            var obliqueProjection = GeometryUtils.CalculateObliqueMatrix(
                sourceProjection, clipPlaneCameraSpace
                );

            cameraSettings.frustum.mode             = CameraSettings.Frustum.Mode.UseProjectionMatrixField;
            cameraSettings.frustum.projectionMatrix = obliqueProjection;
        }
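        // For comparison (illustrative sketch, not part of the source above): Unity's built-in
        // Camera API exposes the same oblique-projection trick via Camera.CalculateObliqueMatrix.
        // A rough equivalent for a regular Camera clipping against a mirror plane:
        static void ApplyObliqueClipSketch(Camera cam, Vector3 mirrorPosition, Vector3 mirrorNormal)
        {
            // Express the mirror plane in camera space as (nx, ny, nz, d).
            var m = cam.worldToCameraMatrix;
            var p = m.MultiplyPoint(mirrorPosition);
            var n = m.MultiplyVector(mirrorNormal).normalized;
            var clipPlaneCameraSpace = new Vector4(n.x, n.y, n.z, -Vector3.Dot(p, n));

            // Replace the near plane of the projection with the mirror plane.
            cam.projectionMatrix = cam.CalculateObliqueMatrix(clipPlaneCameraSpace);
        }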
        /// <summary>
        /// Generate the camera render settings and camera position to use to render a probe.
        /// </summary>
        /// <param name="settings">The probe settings to use.</param>
        /// <param name="position">The probe position to use.</param>
        /// <param name="cameras">Will receives the camera settings.</param>
        /// <param name="cameraPositions">Will receives the camera position settings.</param>
        /// <param name="cameraCubeFaces">Will receive the camera cube face settings.</param>
        /// <param name="overrideSceneCullingMask">Override of the scene culling mask.</param>
        /// <param name="renderSteps">The rendering steps to perform for this probe.</param>
        /// <param name="forceFlipY">Whether to force the Y axis flipping.</param>
        /// <param name="referenceFieldOfView">The reference field of view.</param>
        /// <param name="referenceAspect">The reference aspect ratio.</param>
        public static void GenerateRenderingSettingsFor(
            ProbeSettings settings, ProbeCapturePositionSettings position,
            List<CameraSettings> cameras, List<CameraPositionSettings> cameraPositions, List<CubemapFace> cameraCubeFaces,
            ulong overrideSceneCullingMask,
            ProbeRenderSteps renderSteps,
            bool forceFlipY            = false,
            float referenceFieldOfView = 90,
            float referenceAspect      = 1
            )
        {
            // Copy settings
            ComputeCameraSettingsFromProbeSettings(
                settings, position,
                out var cameraSettings, out var cameraPositionSettings, overrideSceneCullingMask,
                referenceFieldOfView, referenceAspect
                );

            if (forceFlipY)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.PlanarProbe:
            {
                cameras.Add(cameraSettings);
                cameraPositions.Add(cameraPositionSettings);
                cameraCubeFaces.Add(CubemapFace.Unknown);
                break;
            }

            case ProbeSettings.ProbeType.ReflectionProbe:
            {
                for (int i = 0; i < 6; ++i)
                {
                    CubemapFace face = (CubemapFace)i;
                    if (!renderSteps.HasCubeFace(face))
                    {
                        continue;
                    }
                    var cameraPositionCopy = cameraPositionSettings;
                    cameraPositionCopy.rotation = cameraPositionCopy.rotation * Quaternion.Euler(
                        s_GenerateRenderingSettingsFor_Rotations[i]
                        );
                    cameras.Add(cameraSettings);
                    cameraPositions.Add(cameraPositionCopy);
                    cameraCubeFaces.Add(face);
                }
                break;
            }
            }
        }
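        // The per-face rotations referenced above (s_GenerateRenderingSettingsFor_Rotations) form a
        // fixed table mapping cube faces to camera orientations. Its definition is not part of this
        // listing; a conventional table, ordered like CubemapFace (+X, -X, +Y, -Y, +Z, -Z), would look
        // roughly like the sketch below. Treat the exact values as an assumption, not HDRP's table.
        static readonly Vector3[] s_GenerateRenderingSettingsFor_Rotations_Sketch =
        {
            new Vector3(0f,   90f, 0f),  // PositiveX: look along +X
            new Vector3(0f,  -90f, 0f),  // NegativeX: look along -X
            new Vector3(-90f,  0f, 0f),  // PositiveY: look along +Y (up)
            new Vector3(90f,   0f, 0f),  // NegativeY: look along -Y (down)
            new Vector3(0f,    0f, 0f),  // PositiveZ: look along +Z
            new Vector3(0f,  180f, 0f)   // NegativeZ: look along -Z
        };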
        static ProbeCapturePositionSettings ComputeFrom(
            HDProbe probe,
            Vector3 referencePosition, Quaternion referenceRotation
            )
        {
            var result       = new ProbeCapturePositionSettings();
            var proxyToWorld = probe.proxyToWorld;

            result.proxyPosition     = proxyToWorld.GetColumn(3);
            result.proxyRotation     = proxyToWorld.rotation;
            result.referencePosition = referencePosition;
            result.referenceRotation = referenceRotation;
            result.influenceToWorld  = probe.influenceToWorld;
            return result;
        }
        internal static void ApplyMirroredReferenceTransform(
            ref ProbeSettings settings,                             // In Parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition               // InOut parameter
            )
        {
            // Calculate mirror position and forward world space
            var proxyMatrix      = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition   = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);
            var mirrorForward    = proxyMatrix.MultiplyVector(settings.proxySettings.mirrorRotationProxySpace * Vector3.forward);
            var reflectionMatrix = GeometryUtils.CalculateReflectionMatrix(mirrorPosition, mirrorForward);

            // If the reference position lies on (or behind) the reflection plane, offset it slightly
            // (0.1 mm) along the mirror normal to avoid a degenerate case.
            if (Vector3.Dot(mirrorForward, probePosition.referencePosition - mirrorPosition) < 1e-4f)
            {
                probePosition.referencePosition += 1e-4f * mirrorForward;
            }

            var worldToCameraRHS = GeometryUtils.CalculateWorldToCameraMatrixRHS(
                probePosition.referencePosition,

                // TODO: The capture camera should look in a better direction so that only texels
                //   that will actually be sampled are captured.
                //   Ideally it would look at the center of the visible influence volume of the probe
                //   (the intersection of the frustum with the probe's influence volume), but that is
                //   not trivial to compute.
                //   So currently we only look in the mirrored direction of the reference. This captures
                //   more pixels than we need, at a lower effective resolution, but still works for most cases.

                // Note: looking at the center of the influence volume doesn't work in all cases (see case 1157921).
                probePosition.referenceRotation
                );

            cameraPosition.worldToCameraMatrix = worldToCameraRHS * reflectionMatrix;
            // We must invert face culling because the plane reflection flips the triangle winding order.
            cameraSettings.invertFaceCulling = true;

            // Calculate capture position and rotation
            cameraPosition.position = reflectionMatrix.MultiplyPoint(probePosition.referencePosition);
            var forward = reflectionMatrix.MultiplyVector(probePosition.referenceRotation * Vector3.forward);
            var up      = reflectionMatrix.MultiplyVector(probePosition.referenceRotation * Vector3.up);

            cameraPosition.rotation = Quaternion.LookRotation(forward, up);
        }
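        // GeometryUtils.CalculateReflectionMatrix is HDRP-internal. Conceptually it builds the affine
        // reflection about the plane defined by (position, normal). A minimal sketch of the same math
        // (an assumption about its behavior, not the HDRP implementation):
        static Matrix4x4 ReflectionMatrixSketch(Vector3 planePosition, Vector3 planeNormal)
        {
            var n = planeNormal.normalized;
            float d = -Vector3.Dot(n, planePosition);   // Plane: n.x*X + n.y*Y + n.z*Z + d = 0

            // Householder reflection: R = I - 2*n*n^T, plus the translation term -2*d*n.
            var m = Matrix4x4.identity;
            m.m00 = 1f - 2f * n.x * n.x; m.m01 = -2f * n.x * n.y;     m.m02 = -2f * n.x * n.z;     m.m03 = -2f * n.x * d;
            m.m10 = -2f * n.y * n.x;     m.m11 = 1f - 2f * n.y * n.y; m.m12 = -2f * n.y * n.z;     m.m13 = -2f * n.y * d;
            m.m20 = -2f * n.z * n.x;     m.m21 = -2f * n.z * n.y;     m.m22 = 1f - 2f * n.z * n.z; m.m23 = -2f * n.z * d;
            return m;
        }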
        public static void ComputeCameraSettingsFromProbeSettings(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            float referenceFieldOfView = 90
            )
        {
            // Copy settings
            cameraSettings         = settings.camera;
            cameraPositionSettings = CameraPositionSettings.@default;

            // Update settings
            ProbeSettingsUtilities.ApplySettings(
                ref settings, ref position,
                ref cameraSettings, ref cameraPositionSettings,
                referenceFieldOfView
                );
        }
        public static void Render(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            Texture target,
            bool forceFlipY = false,
            bool forceInvertBackfaceCulling = false,
            uint staticFlags           = 0,
            float referenceFieldOfView = 90
            )
        {
            Render(
                settings, position, target,
                out _, out _,
                forceFlipY,
                forceInvertBackfaceCulling,
                staticFlags,
                referenceFieldOfView
                );
        }
        public static void GenerateRenderingSettingsFor(
            ProbeSettings settings, ProbeCapturePositionSettings position,
            List<CameraSettings> cameras, List<CameraPositionSettings> cameraPositions,
            ulong overrideSceneCullingMask,
            bool forceFlipY = false, float referenceFieldOfView = 90
            )
        {
            // Copy settings
            ComputeCameraSettingsFromProbeSettings(
                settings, position,
                out var cameraSettings, out var cameraPositionSettings, overrideSceneCullingMask,
                referenceFieldOfView
                );

            if (forceFlipY)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.PlanarProbe:
            {
                cameras.Add(cameraSettings);
                cameraPositions.Add(cameraPositionSettings);
                break;
            }

            case ProbeSettings.ProbeType.ReflectionProbe:
            {
                for (int i = 0; i < 6; ++i)
                {
                    var cameraPositionCopy = cameraPositionSettings;
                    cameraPositionCopy.rotation = cameraPositionCopy.rotation * Quaternion.Euler(
                        s_GenerateRenderingSettingsFor_Rotations[i]
                        );
                    cameras.Add(cameraSettings);
                    cameraPositions.Add(cameraPositionCopy);
                }
                break;
            }
            }
        }
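        // Usage sketch for the overload above (not part of the source): generate one camera setup per
        // cube face of a reflection probe and inspect the results. Assumes the method lives on
        // HDRenderUtilities and that ProbeCapturePositionSettings.ComputeFrom(probe, null) exists,
        // as in HDRP; a zero culling-mask override means "no override".
        static void GenerateSettingsSketch(HDProbe probe)
        {
            var cameras   = new List<CameraSettings>();
            var positions = new List<CameraPositionSettings>();

            HDRenderUtilities.GenerateRenderingSettingsFor(
                probe.settings,
                ProbeCapturePositionSettings.ComputeFrom(probe, null),
                cameras, positions,
                overrideSceneCullingMask: 0
                );

            // A ReflectionProbe yields 6 entries (one per cube face); a PlanarProbe yields a single entry.
            Debug.Log($"Generated {cameras.Count} capture camera(s) for {probe.name}");
        }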
        internal static void ApplyPlanarFrustumHandling(
            ref ProbeSettings settings,                             // In Parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition,              // InOut parameter
            float referenceFieldOfView, float referenceAspect
            )
        {
            const float k_MaxFieldOfView = 170;

            var proxyMatrix    = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);

            cameraSettings.frustum.aspect = referenceAspect;

            switch (settings.frustum.fieldOfViewMode)
            {
            case ProbeSettings.Frustum.FOVMode.Fixed:
                cameraSettings.frustum.fieldOfView = settings.frustum.fixedValue;
                break;

            case ProbeSettings.Frustum.FOVMode.Viewer:
                cameraSettings.frustum.fieldOfView = Mathf.Min(
                    referenceFieldOfView * settings.frustum.viewerScale,
                    k_MaxFieldOfView
                    );
                break;

            case ProbeSettings.Frustum.FOVMode.Automatic:
                // Dynamic FOV tries to adapt the FOV to have maximum usage of the target render texture
                //     (A lot of pixel can be discarded in the render texture). This way we can have a greater
                //     resolution for the planar with the same cost.
                cameraSettings.frustum.fieldOfView = Mathf.Min(
                    settings.influence.ComputeFOVAt(
                        probePosition.referencePosition, mirrorPosition, probePosition.influenceToWorld
                        ) * settings.frustum.automaticScale,
                    k_MaxFieldOfView
                    );
                break;
            }
        }
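        // InfluenceVolume.ComputeFOVAt is HDRP-internal. The idea behind the Automatic mode above is to
        // pick the smallest field of view whose frustum still covers the probe's influence volume as seen
        // from the capture point. A rough sketch of that computation, assuming the influence volume is
        // approximated by a bounding sphere (illustrative only, not the HDRP implementation):
        static float FovCoveringSphereSketch(Vector3 capturePosition, Vector3 sphereCenter, float sphereRadius)
        {
            float distance = Vector3.Distance(capturePosition, sphereCenter);
            if (distance <= sphereRadius)
                return 170f;    // Inside the volume: fall back to the maximum FOV clamp.

            // Full vertical FOV is twice the half angle subtended by the sphere.
            return 2f * Mathf.Asin(sphereRadius / distance) * Mathf.Rad2Deg;
        }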
        static void FixSettings(
            Texture target,
            ref ProbeSettings settings, ref ProbeCapturePositionSettings position,
            ref CameraSettings cameraSettings, ref CameraPositionSettings cameraPositionSettings
            )
        {
            // Fix a specific case:
            // when rendering into a cubemap with Camera.RenderToCubemap, Unity flips the image during
            // the readback before writing it into the cubemap, so the resulting cubemap ends up flipped.
            // We therefore force HDRP to flip the last blit so the final orientation is correct.
            RenderTexture rt = null;

            if ((rt = target as RenderTexture) != null &&
                rt.dimension == TextureDimension.Cube &&
                settings.type == ProbeSettings.ProbeType.ReflectionProbe &&
                SystemInfo.graphicsUVStartsAtTop)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }
        }
        public static void ComputeCameraSettingsFromProbeSettings(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            ulong overrideSceneCullingMask,
            float referenceFieldOfView = 90
            )
        {
            // Copy settings
            cameraSettings         = settings.cameraSettings;
            cameraPositionSettings = CameraPositionSettings.@default;

            // Update settings
            ProbeSettingsUtilities.ApplySettings(
                ref settings, ref position,
                ref cameraSettings, ref cameraPositionSettings,
                referenceFieldOfView
                );

            cameraSettings.culling.sceneCullingMaskOverride = overrideSceneCullingMask;
        }
        static ProbeCapturePositionSettings ComputeFrom(
            HDProbe probe,
            Vector3 referencePosition, Quaternion referenceRotation
            )
        {
            var result       = new ProbeCapturePositionSettings();
            var proxyToWorld = probe.proxyToWorld;

            result.proxyPosition = proxyToWorld.GetColumn(3);

            // If the reference position and the proxy position are exactly the same, we hit some degeneracies
            // triggered by engine code when computing culling parameters. This is an extremely rare case, but it
            // can happen in the editor when focusing on the planar probe. If it happens, we offset them 0.1 mm apart.
            if (Vector3.Distance(result.proxyPosition, referencePosition) < 1e-4f)
            {
                referencePosition += new Vector3(1e-4f, 1e-4f, 1e-4f);
            }

            result.proxyRotation     = proxyToWorld.rotation;
            result.referencePosition = referencePosition;
            result.referenceRotation = referenceRotation;
            result.influenceToWorld  = probe.influenceToWorld;
            return result;
        }
        /// <summary>
        /// Apply <paramref name="settings"/> and <paramref name="probePosition"/> to
        /// <paramref name="cameraPosition"/> and <paramref name="cameraSettings"/>.
        /// </summary>
        /// <param name="settings">Settings to apply. (Read only)</param>
        /// <param name="probePosition">Position to apply. (Read only)</param>
        /// <param name="cameraSettings">Settings to update.</param>
        /// <param name="cameraPosition">Position to update.</param>
        public static void ApplySettings(
            ref ProbeSettings settings,                             // In Parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition,              // InOut parameter
            float referenceFieldOfView = 90
            )
        {
            cameraSettings = settings.camera;
            // Compute the modes for each probe type
            PositionMode positionMode;
            bool         useReferenceTransformAsNearClipPlane;

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.PlanarProbe:
                positionMode = PositionMode.MirrorReferenceTransformWithProbePlane;
                useReferenceTransformAsNearClipPlane = true;
                ApplyPlanarFrustumHandling(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition,
                    referenceFieldOfView
                    );
                break;

            case ProbeSettings.ProbeType.ReflectionProbe:
                positionMode = PositionMode.UseProbeTransform;
                useReferenceTransformAsNearClipPlane = false;
                cameraSettings.frustum.mode          = CameraSettings.Frustum.Mode.ComputeProjectionMatrix;
                cameraSettings.frustum.aspect        = 1;
                cameraSettings.frustum.fieldOfView   = 90;
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }

            // Update the position
            switch (positionMode)
            {
            case PositionMode.UseProbeTransform:
            {
                cameraPosition.mode = CameraPositionSettings.Mode.ComputeWorldToCameraMatrix;
                var proxyMatrix = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
                cameraPosition.position = proxyMatrix.MultiplyPoint(settings.proxySettings.capturePositionProxySpace);
                cameraPosition.rotation = proxyMatrix.rotation * settings.proxySettings.captureRotationProxySpace;

                // When baking a probe, the orientation of the cubemap doesn't matter 99% of the time,
                //   so we build it without any rotation; that way we don't have to change the basis
                //   when sampling the cubemap.
                if (settings.type == ProbeSettings.ProbeType.ReflectionProbe)
                {
                    cameraPosition.rotation = Quaternion.identity;
                }
                break;
            }

            case PositionMode.MirrorReferenceTransformWithProbePlane:
            {
                cameraPosition.mode = CameraPositionSettings.Mode.UseWorldToCameraMatrixField;
                ApplyMirroredReferenceTransform(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition
                    );
                break;
            }
            }

            // Update the clip plane
            if (useReferenceTransformAsNearClipPlane)
            {
                ApplyObliqueNearClipPlane(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition
                    );
            }

            // Propagate the desired custom exposure
            cameraSettings.probeRangeCompressionFactor = settings.lighting.rangeCompressionFactor;

            // Frame Settings Overrides
            switch (settings.mode)
            {
            default:
            case ProbeSettings.Mode.Realtime:
                cameraSettings.defaultFrameSettings = FrameSettingsRenderType.RealtimeReflection;
                break;

            case ProbeSettings.Mode.Baked:
            case ProbeSettings.Mode.Custom:
                cameraSettings.defaultFrameSettings = FrameSettingsRenderType.CustomOrBakedReflection;
                break;
            }

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.ReflectionProbe:
                cameraSettings.customRenderingSettings = true;
                break;
            }
        }
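        // End-to-end sketch for the planar path handled above (not part of the source): capture a planar
        // probe from the point of view of a viewer camera, letting the mirrored reference transform and
        // the oblique near clip plane be applied by the code in this listing. PlanarReflectionProbe,
        // HDRenderUtilities.Render and ProbeCapturePositionSettings.ComputeFrom(probe, transform) are
        // assumed to exist as in HDRP.
        static RenderTexture CapturePlanarSketch(PlanarReflectionProbe probe, Camera viewer, int size)
        {
            var target = new RenderTexture(size, size, 24, RenderTextureFormat.ARGBHalf);
            target.Create();

            var position = ProbeCapturePositionSettings.ComputeFrom(probe, viewer.transform);
            HDRenderUtilities.Render(
                probe.settings, position, target,
                out _, out _,
                referenceFieldOfView: viewer.fieldOfView,
                referenceAspect: viewer.aspect
                );
            return target;
        }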