Code example #1
 /// <summary>Applies <paramref name="settings"/> to <paramref name="cam"/>.</summary>
 /// <param name="cam">Camera to update.</param>
 /// <param name="settings">Settings to apply.</param>
 public static void ApplySettings(this Camera cam, CameraPositionSettings settings)
 {
     // Position, rotation and view matrix
     cam.transform.position  = settings.position;
     cam.transform.rotation  = settings.rotation;
     cam.worldToCameraMatrix = settings.GetUsedWorldToCameraMatrix();
 }
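A minimal usage sketch for this extension, assuming the CameraPositionSettings members shown in the other examples (position, rotation and the @default value) are publicly settable; the throwaway camera and the chosen values are illustrative only.

 // Hypothetical usage: apply a capture position to a temporary camera.
 public static void ApplySettingsExample()
 {
     var go  = new GameObject("CaptureCamera");
     var cam = go.AddComponent<Camera>();

     var position = CameraPositionSettings.@default;
     position.position = new Vector3(0f, 1f, 0f);
     position.rotation = Quaternion.LookRotation(Vector3.forward, Vector3.up);

     cam.ApplySettings(position);   // extension method above
     // ... render with the camera here ...
     CoreUtils.Destroy(go);
 }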
Code example #2
        /// <summary>
        /// Render a probe
        /// </summary>
        /// <param name="settings">The probe settings to use</param>
        /// <param name="position">The probe position to use</param>
        /// <param name="target">The target texture.</param>
        /// <param name="cameraSettings">The camera settings used during the rendering</param>
        /// <param name="cameraPositionSettings">The camera position settings used during the rendering.</param>
        /// <param name="forceFlipY">Whether to force the Y axis flipping.</param>
        /// <param name="forceInvertBackfaceCulling">Whether to force the backface culling inversion.</param>
        /// <param name="staticFlags">The static flag to use during the rendering.</param>
        /// <param name="referenceFieldOfView">The reference field of view.</param>
        /// <param name="referenceAspect">The reference aspect ratio.</param>
        public static void Render(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            Texture target,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            bool forceFlipY = false,
            bool forceInvertBackfaceCulling = false,
            uint staticFlags           = 0,
            float referenceFieldOfView = 90,
            float referenceAspect      = 1
            )
        {
            // Copy settings
            ComputeCameraSettingsFromProbeSettings(
                settings, position,
                out cameraSettings, out cameraPositionSettings, 0,
                referenceFieldOfView, referenceAspect
                );

            if (forceFlipY)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }
            if (forceInvertBackfaceCulling)
            {
                cameraSettings.invertFaceCulling = true;
            }

            // Perform rendering
            Render(cameraSettings, cameraPositionSettings, target, staticFlags);
        }
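A hedged sketch of how this overload could be invoked for a capture into a 2D render texture; how the ProbeSettings and ProbeCapturePositionSettings values are obtained (and the texture size) is an assumption, not part of the snippet above.

        // Hypothetical usage of the Render overload above; the probe data is a placeholder here.
        static void RenderProbeExample(ProbeSettings settings, ProbeCapturePositionSettings position)
        {
            var rt = new RenderTexture(256, 256, 24);

            Render(
                settings, position, rt,
                out var cameraSettings, out var cameraPositionSettings,
                referenceFieldOfView: 60f,
                referenceAspect: 1f
                );

            // cameraSettings / cameraPositionSettings now describe the camera that was used;
            // example #4 shows the same pair being stored into an HDProbe.RenderData.
        }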
Code example #3
        internal static void ApplyObliqueNearClipPlane(
            ref ProbeSettings settings,                             // In parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition               // InOut parameter
            )
        {
            var proxyMatrix    = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);
            var mirrorForward  = proxyMatrix.MultiplyVector(settings.proxySettings.mirrorRotationProxySpace * Vector3.forward);

            var clipPlaneCameraSpace = GeometryUtils.CameraSpacePlane(
                cameraPosition.worldToCameraMatrix,
                mirrorPosition,
                mirrorForward
                );

            var sourceProjection = Matrix4x4.Perspective(
                HDUtils.ClampFOV(cameraSettings.frustum.fieldOfView),
                cameraSettings.frustum.aspect,
                cameraSettings.frustum.nearClipPlane,
                cameraSettings.frustum.farClipPlane
                );
            var obliqueProjection = GeometryUtils.CalculateObliqueMatrix(
                sourceProjection, clipPlaneCameraSpace
                );

            cameraSettings.frustum.mode             = CameraSettings.Frustum.Mode.UseProjectionMatrixField;
            cameraSettings.frustum.projectionMatrix = obliqueProjection;
        }
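For reference, a standalone sketch of the usual way such a camera-space clip plane is built: transform the mirror point and normal into camera space, then pack them as a plane equation (n.x, n.y, n.z, -dot(n, p)). HDRP's GeometryUtils may differ in sign or handedness conventions, so this is illustrative only; Unity's built-in Camera.CalculateObliqueMatrix performs the same kind of projection modification as GeometryUtils.CalculateObliqueMatrix.

        // Illustrative only: standard construction of a camera-space plane for oblique clipping.
        static Vector4 CameraSpacePlaneSketch(Matrix4x4 worldToCamera, Vector3 point, Vector3 normal)
        {
            Vector3 cPos    = worldToCamera.MultiplyPoint(point);
            Vector3 cNormal = worldToCamera.MultiplyVector(normal).normalized;
            return new Vector4(cNormal.x, cNormal.y, cNormal.z, -Vector3.Dot(cPos, cNormal));
        }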
Code example #4
File: HDProbe.cs   Project: Gome70/PDT-Ship-Engineer
 public RenderData(CameraSettings camera, CameraPositionSettings position)
     : this(
         position.GetUsedWorldToCameraMatrix(),
         camera.frustum.GetUsedProjectionMatrix(),
         position.position,
         position.rotation,
         camera.frustum.fieldOfView
         )
 {
 }
Code example #5
        internal static void ApplyMirroredReferenceTransform(
            ref ProbeSettings settings,                             // In parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition               // InOut parameter
            )
        {
            // Calculate mirror position and forward direction in world space
            var proxyMatrix      = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition   = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);
            var mirrorForward    = proxyMatrix.MultiplyVector(settings.proxySettings.mirrorRotationProxySpace * Vector3.forward);
            var reflectionMatrix = GeometryUtils.CalculateReflectionMatrix(mirrorPosition, mirrorForward);

            // If the reference position lies on the reflection plane (or behind it), offset it by 0.1 mm along the mirror normal to avoid a degenerate case.
            if (Vector3.Dot(mirrorForward, probePosition.referencePosition - mirrorPosition) < 1e-4f)
            {
                probePosition.referencePosition += 1e-4f * mirrorForward;
            }

            var worldToCameraRHS = GeometryUtils.CalculateWorldToCameraMatrixRHS(
                probePosition.referencePosition,

                // TODO: The capture camera should look at a better direction to only capture texels that
                //   will actually be sampled.
                //   The position it should look at is the center of the visible influence volume of the probe.
                //   (visible influence volume: the intersection of the frustum with the probe's influence volume).
                //   But this is not trivial to get.
                //   So currently, we only look in the mirrored direction of the reference. This captures
                //   more pixels than we want at a lower effective resolution, but still works for most cases.

                // Note: looking at the center of the influence volume doesn't work in all cases (see case 1157921)
                probePosition.referenceRotation
                );

            cameraPosition.worldToCameraMatrix = worldToCameraRHS * reflectionMatrix;
            // We must invert the culling because we performed a plane reflection
            cameraSettings.invertFaceCulling = true;

            // Calculate capture position and rotation
            cameraPosition.position = reflectionMatrix.MultiplyPoint(probePosition.referencePosition);
            var forward = reflectionMatrix.MultiplyVector(probePosition.referenceRotation * Vector3.forward);
            var up      = reflectionMatrix.MultiplyVector(probePosition.referenceRotation * Vector3.up);

            cameraPosition.rotation = Quaternion.LookRotation(forward, up);
        }
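For context, GeometryUtils.CalculateReflectionMatrix above builds a planar reflection about the mirror plane. A sketch of the standard construction is shown below (reflect across the plane through point p with unit normal n: x' = (I - 2nn^T)x + 2(n.p)n); the actual HDRP helper may order or sign its terms differently.

        // Illustrative only: reflection matrix across the plane through 'point' with normal 'normal'.
        static Matrix4x4 ReflectionMatrixSketch(Vector3 point, Vector3 normal)
        {
            normal  = normal.normalized;
            float d = Vector3.Dot(normal, point);

            var m = Matrix4x4.identity;
            m.m00 = 1f - 2f * normal.x * normal.x; m.m01 =     -2f * normal.x * normal.y; m.m02 =     -2f * normal.x * normal.z; m.m03 = 2f * d * normal.x;
            m.m10 =     -2f * normal.y * normal.x; m.m11 = 1f - 2f * normal.y * normal.y; m.m12 =     -2f * normal.y * normal.z; m.m13 = 2f * d * normal.y;
            m.m20 =     -2f * normal.z * normal.x; m.m21 =     -2f * normal.z * normal.y; m.m22 = 1f - 2f * normal.z * normal.z; m.m23 = 2f * d * normal.z;
            return m;
        }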
Code example #6
        public static void ComputeCameraSettingsFromProbeSettings(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            float referenceFieldOfView = 90
            )
        {
            // Copy settings
            cameraSettings         = settings.camera;
            cameraPositionSettings = CameraPositionSettings.@default;

            // Update settings
            ProbeSettingsUtilities.ApplySettings(
                ref settings, ref position,
                ref cameraSettings, ref cameraPositionSettings,
                referenceFieldOfView
                );
        }
Code example #7
        internal static void ApplyPlanarFrustumHandling(
            ref ProbeSettings settings,                             // In parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition,              // InOut parameter
            float referenceFieldOfView, float referenceAspect
            )
        {
            const float k_MaxFieldOfView = 170;

            var proxyMatrix    = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
            var mirrorPosition = proxyMatrix.MultiplyPoint(settings.proxySettings.mirrorPositionProxySpace);

            cameraSettings.frustum.aspect = referenceAspect;

            switch (settings.frustum.fieldOfViewMode)
            {
            case ProbeSettings.Frustum.FOVMode.Fixed:
                cameraSettings.frustum.fieldOfView = settings.frustum.fixedValue;
                break;

            case ProbeSettings.Frustum.FOVMode.Viewer:
                cameraSettings.frustum.fieldOfView = Mathf.Min(
                    referenceFieldOfView * settings.frustum.viewerScale,
                    k_MaxFieldOfView
                    );
                break;

            case ProbeSettings.Frustum.FOVMode.Automatic:
                // Dynamic FOV tries to adapt the FOV to make maximum use of the target render texture
                //     (otherwise a lot of pixels in the render texture can be discarded). This way we get a greater
                //     effective resolution for the planar probe at the same cost.
                cameraSettings.frustum.fieldOfView = Mathf.Min(
                    settings.influence.ComputeFOVAt(
                        probePosition.referencePosition, mirrorPosition, probePosition.influenceToWorld
                        ) * settings.frustum.automaticScale,
                    k_MaxFieldOfView
                    );
                break;
            }
        }
Code example #8
        static void FixSettings(
            Texture target,
            ref ProbeSettings settings, ref ProbeCapturePositionSettings position,
            ref CameraSettings cameraSettings, ref CameraPositionSettings cameraPositionSettings
            )
        {
            // Fix a specific case:
            // When rendering into a cubemap with Camera.RenderToCubemap,
            // Unity flips the image during readback before writing it into the cubemap,
            // but the resulting cubemap then ends up flipped.
            // So we force HDRP to flip the last blit to restore the correct orientation.
            RenderTexture rt = null;

            if ((rt = target as RenderTexture) != null &&
                rt.dimension == TextureDimension.Cube &&
                settings.type == ProbeSettings.ProbeType.ReflectionProbe &&
                SystemInfo.graphicsUVStartsAtTop)
            {
                cameraSettings.flipYMode = HDAdditionalCameraData.FlipYMode.ForceFlipY;
            }
        }
Code example #9
        public static void ComputeCameraSettingsFromProbeSettings(
            ProbeSettings settings,
            ProbeCapturePositionSettings position,
            out CameraSettings cameraSettings,
            out CameraPositionSettings cameraPositionSettings,
            ulong overrideSceneCullingMask,
            float referenceFieldOfView = 90
            )
        {
            // Copy settings
            cameraSettings         = settings.cameraSettings;
            cameraPositionSettings = CameraPositionSettings.@default;

            // Update settings
            ProbeSettingsUtilities.ApplySettings(
                ref settings, ref position,
                ref cameraSettings, ref cameraPositionSettings,
                referenceFieldOfView
                );

            cameraSettings.culling.sceneCullingMaskOverride = overrideSceneCullingMask;
        }
Code example #10
        /// <summary>Perform a rendering into <paramref name="target"/>.</summary>
        /// <example>
        /// How to perform standard rendering:
        /// <code>
        /// class StandardRenderingExample
        /// {
        ///     public void Render()
        ///     {
        ///         // Copy default settings
        ///         var settings = CameraRenderSettings.Default;
        ///         // Adapt default settings to our custom usage
        ///         settings.position.position = new Vector3(0, 1, 0);
        ///         settings.camera.frustum.fieldOfView = 60.0f;
        ///         // Get our render target
        ///         var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_SNorm);
        ///         HDRenderUtilities.Render(settings, rt);
        ///         // Do something with rt
        ///         rt.Release();
        ///     }
        /// }
        /// </code>
        ///
        /// How to perform a cubemap rendering:
        /// <code>
        /// class CubemapRenderExample
        /// {
        ///     public void Render()
        ///     {
        ///         // Copy default settings
        ///         var settings = CameraRenderSettings.Default;
        ///         // Adapt default settings to our custom usage
        ///         settings.position.position = new Vector3(0, 1, 0);
        ///         settings.camera.physical.iso = 800.0f;
        ///         // Frustum settings are ignored and driven by the cubemap rendering
        ///         // Get our render target
        ///         var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_SNorm)
        ///         {
        ///             dimension = TextureDimension.Cube
        ///         };
        ///         // The TextureDimension is detected and the renderer will perform a cubemap rendering.
        ///         HDRenderUtilities.Render(settings, rt);
        ///         // Do something with rt
        ///         rt.Release();
        ///     }
        /// }
        /// </code>
        /// </example>
        /// <param name="settings">Settings for the camera.</param>
        /// <param name="position">Position for the camera.</param>
        /// <param name="target">Target to render to.</param>
        /// <param name="staticFlags">Only used in the Editor fo cubemaps.
        /// This is bitmask of <see cref="UnityEditor.StaticEditorFlags"/> only objects with these flags will be rendered
        /// </param>
        public static void Render(
            CameraSettings settings,
            CameraPositionSettings position,
            Texture target,
            uint staticFlags = 0
            )
        {
            // Argument checking
            if (target == null)
            {
                throw new ArgumentNullException(nameof(target));
            }

            var rtTarget   = target as RenderTexture;
            var cubeTarget = target as Cubemap;

            switch (target.dimension)
            {
            case TextureDimension.Tex2D:
                if (rtTarget == null)
                {
                    throw new ArgumentException("'target' must be a RenderTexture when rendering into a 2D texture");
                }
                break;

            case TextureDimension.Cube:
                break;

            default:
                throw new ArgumentException("Rendering into a target of dimension "
                                            + $"{target.dimension} is not supported");
            }

            var camera = NewRenderingCamera();

            try
            {
                camera.ApplySettings(settings);
                camera.ApplySettings(position);

                switch (target.dimension)
                {
                case TextureDimension.Tex2D:
                {
#if DEBUG
                    if (staticFlags != 0)
                    {
                        Debug.LogWarning(
                            "A static flags bitmask was provided but this is ignored when rendering into a Tex2D"
                            );
                    }
#endif
                    Assert.IsNotNull(rtTarget);
                    camera.targetTexture = rtTarget;
                    camera.Render();
                    camera.targetTexture = null;
                    target.IncrementUpdateCount();
                    break;
                }

                case TextureDimension.Cube:
                {
                    Assert.IsTrue(rtTarget != null || cubeTarget != null);

                    var canHandleStaticFlags = false;
#if UNITY_EDITOR
                    canHandleStaticFlags = true;
#endif
                    // ReSharper disable ConditionIsAlwaysTrueOrFalse
                    if (canHandleStaticFlags && staticFlags != 0)
                    // ReSharper restore ConditionIsAlwaysTrueOrFalse
                    {
#if UNITY_EDITOR
                        UnityEditor.Rendering.EditorCameraUtils.RenderToCubemap(
                            camera,
                            rtTarget,
                            -1,
                            (UnityEditor.StaticEditorFlags)staticFlags
                            );
#endif
                    }
                    else
                    {
                        // ReSharper disable ConditionIsAlwaysTrueOrFalse
                        if (!canHandleStaticFlags && staticFlags != 0)
                        // ReSharper restore ConditionIsAlwaysTrueOrFalse
                        {
                            Debug.LogWarning(
                                "A static flags bitmask was provided but this is ignored in player builds"
                                );
                        }

                        if (rtTarget != null)
                        {
                            camera.RenderToCubemap(rtTarget);
                        }
                        if (cubeTarget != null)
                        {
                            camera.RenderToCubemap(cubeTarget);
                        }
                    }

                    target.IncrementUpdateCount();
                    break;
                }
                }
            }
            finally
            {
                CoreUtils.Destroy(camera.gameObject);
            }
        }
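In the Editor, the staticFlags bitmask can be built from UnityEditor.StaticEditorFlags, as the documentation above notes. Below is a hedged usage sketch; CameraSettings.@default is assumed to exist by analogy with CameraPositionSettings.@default (if it does not, obtain the settings via ComputeCameraSettingsFromProbeSettings as in example #2), and the flag choice and texture size are illustrative.

        // Hypothetical Editor-only usage: render only objects marked ReflectionProbeStatic into a cubemap.
        static void RenderStaticOnlyCubemapExample()
        {
#if UNITY_EDITOR
            var cubeRT = new RenderTexture(256, 256, 24)
            {
                dimension = TextureDimension.Cube
            };

            Render(
                CameraSettings.@default,            // assumption, see note above
                CameraPositionSettings.@default,
                cubeRT,
                (uint)UnityEditor.StaticEditorFlags.ReflectionProbeStatic
                );
#endif
        }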
Code example #11
        /// <summary>
        /// Apply <paramref name="settings"/> and <paramref name="probePosition"/> to
        /// <paramref name="cameraPosition"/> and <paramref name="cameraSettings"/>.
        /// </summary>
        /// <param name="settings">Settings to apply. (Read only)</param>
        /// <param name="probePosition">Position to apply. (Read only)</param>
        /// <param name="cameraSettings">Settings to update.</param>
        /// <param name="cameraPosition">Position to update.</param>
        public static void ApplySettings(
            ref ProbeSettings settings,                             // In parameter
            ref ProbeCapturePositionSettings probePosition,         // In parameter
            ref CameraSettings cameraSettings,                      // InOut parameter
            ref CameraPositionSettings cameraPosition,              // InOut parameter
            float referenceFieldOfView = 90
            )
        {
            cameraSettings = settings.camera;
            // Compute the modes for each probe type
            PositionMode positionMode;
            bool         useReferenceTransformAsNearClipPlane;

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.PlanarProbe:
                positionMode = PositionMode.MirrorReferenceTransformWithProbePlane;
                useReferenceTransformAsNearClipPlane = true;
                ApplyPlanarFrustumHandling(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition,
                    referenceFieldOfView
                    );
                break;

            case ProbeSettings.ProbeType.ReflectionProbe:
                positionMode = PositionMode.UseProbeTransform;
                useReferenceTransformAsNearClipPlane = false;
                cameraSettings.frustum.mode          = CameraSettings.Frustum.Mode.ComputeProjectionMatrix;
                cameraSettings.frustum.aspect        = 1;
                cameraSettings.frustum.fieldOfView   = 90;
                break;

            default:
                throw new ArgumentOutOfRangeException();
            }

            // Update the position
            switch (positionMode)
            {
            case PositionMode.UseProbeTransform:
            {
                cameraPosition.mode = CameraPositionSettings.Mode.ComputeWorldToCameraMatrix;
                var proxyMatrix = Matrix4x4.TRS(probePosition.proxyPosition, probePosition.proxyRotation, Vector3.one);
                cameraPosition.position = proxyMatrix.MultiplyPoint(settings.proxySettings.capturePositionProxySpace);
                cameraPosition.rotation = proxyMatrix.rotation * settings.proxySettings.captureRotationProxySpace;

                // In the case of probe baking, 99% of the time the orientation of the cubemap doesn't matter,
                //   so we build one without any rotation; that way we don't have to change the basis
                //   when sampling the cubemap.
                if (settings.type == ProbeSettings.ProbeType.ReflectionProbe)
                {
                    cameraPosition.rotation = Quaternion.identity;
                }
                break;
            }

            case PositionMode.MirrorReferenceTransformWithProbePlane:
            {
                cameraPosition.mode = CameraPositionSettings.Mode.UseWorldToCameraMatrixField;
                ApplyMirroredReferenceTransform(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition
                    );
                break;
            }
            }

            // Update the clip plane
            if (useReferenceTransformAsNearClipPlane)
            {
                ApplyObliqueNearClipPlane(
                    ref settings, ref probePosition,
                    ref cameraSettings, ref cameraPosition
                    );
            }

            // Propagate the desired custom exposure
            cameraSettings.probeRangeCompressionFactor = settings.lighting.rangeCompressionFactor;

            // Frame Settings Overrides
            switch (settings.mode)
            {
            default:
            case ProbeSettings.Mode.Realtime:
                cameraSettings.defaultFrameSettings = FrameSettingsRenderType.RealtimeReflection;
                break;

            case ProbeSettings.Mode.Baked:
            case ProbeSettings.Mode.Custom:
                cameraSettings.defaultFrameSettings = FrameSettingsRenderType.CustomOrBakedReflection;
                break;
            }

            switch (settings.type)
            {
            case ProbeSettings.ProbeType.ReflectionProbe:
                cameraSettings.customRenderingSettings = true;
                break;
            }
        }