diff --git a/com.unity.render-pipelines.high-definition/CHANGELOG.md b/com.unity.render-pipelines.high-definition/CHANGELOG.md index 4e44dcae455..f65a221d12d 100644 --- a/com.unity.render-pipelines.high-definition/CHANGELOG.md +++ b/com.unity.render-pipelines.high-definition/CHANGELOG.md @@ -160,6 +160,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. - Added decal layer system similar to light layer. Mesh will receive a decal when both decal layer mask matches. - Added shader graph nodes for rendering a complex eye shader. - Added more controls to contact shadows and increased quality in some parts. +- Added a physically based option to the Depth of Field volume component. ### Fixed - Fix when rescale probe all direction below zero (1219246) diff --git a/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md b/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md index 447004a2a5b..dccf270021f 100644 --- a/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md +++ b/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md @@ -45,6 +45,7 @@ Depth Of Field includes [more options](More-Options.html) that you must manually | -------------------------- | ------------------------------------------------------------ | | **Resolution** | Use the drop-down to set the resolution at which HDRP processes the depth of field effect. If you target consoles that use a very high resolution (for example, 4k), select **Quarter,** because it is less resource intensive.
• **Quarter**: Uses quarter the screen resolution.
• **Half**: Uses half the screen resolution.
This property only appears when you enable [more options](More-Options.html). | | **High Quality Filtering** | Enable the checkbox to make HDRP use bicubic filtering instead of bilinear filtering. This increases the resource intensity of the Depth Of Field effect, but results in smoother visuals.
This property only appears when you enable [more options](More-Options.html). | +| **Physically Based** | Enable the checkbox to make HDRP use a more accurate but slower physically based technique to compute Depth of Field. It is highly recommended to enable [Temporal anti-aliasing (TAA)](Anti-Aliasing.html) at the same time for improved quality and performance. | diff --git a/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs b/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs index b83769a5e4e..641a2b98665 100644 --- a/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs +++ b/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs @@ -19,6 +19,9 @@ static partial class Styles public static GUIContent k_NearFocusEnd = new GUIContent("End", "Sets the distance from the Camera at which the near field does not blur anymore."); public static GUIContent k_FarFocusEnd = new GUIContent("End", "Sets the distance from the Camera at which the far field blur reaches its maximum blur radius."); + public static GUIContent k_PhysicallyBased = new GUIContent("Physically Based", "Uses a more accurate but slower physically based method to compute DoF."); + + public static readonly string InfoBox = "Physically Based DoF currently has a high performance overhead. Enabling TAA is highly recommended when using this option."; } SerializedDataParameter m_FocusMode; @@ -41,6 +44,7 @@ static partial class Styles // Advanced settings SerializedDataParameter m_HighQualityFiltering; SerializedDataParameter m_Resolution; + SerializedDataParameter m_PhysicallyBased; public override bool hasAdvancedMode => true; @@ -66,6 +70,7 @@ public override void OnEnable() m_HighQualityFiltering = Unpack(o.Find("m_HighQualityFiltering")); m_Resolution = Unpack(o.Find("m_Resolution")); + m_PhysicallyBased = Unpack(o.Find("m_PhysicallyBased")); } public override void OnInspectorGUI() @@ -132,6 +137,9 @@ public override void OnInspectorGUI() EditorGUILayout.LabelField("Advanced Tweaks", EditorStyles.miniLabel); PropertyField(m_Resolution); PropertyField(m_HighQualityFiltering); + PropertyField(m_PhysicallyBased); + if (m_PhysicallyBased.value.boolValue) + EditorGUILayout.HelpBox(Styles.InfoBox, MessageType.Info); GUI.enabled = true; } } diff --git a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs index 5cbdefb0ea0..fbe26589dc7 100644 --- a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs +++ b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs @@ -57,6 +57,7 @@ public class GeneralSection public static readonly GUIContent sampleCountQuality = EditorGUIUtility.TrTextContent("Sample Count"); public static readonly GUIContent resolutionQuality = EditorGUIUtility.TrTextContent("Resolution"); public static readonly GUIContent highQualityFiltering = EditorGUIUtility.TrTextContent("High Quality Filtering"); + public static readonly GUIContent dofPhysicallyBased = EditorGUIUtility.TrTextContent("Physically Based"); public static readonly GUIContent maxSamplesQuality = EditorGUIUtility.TrTextContent("Max Samples"); public static readonly GUIContent SSAOQualitySettingSubTitle = EditorGUIUtility.TrTextContent("Screen Space Ambient Occlusion"); diff --git
a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs index f5f7250977e..b9c0361d084 100644 --- a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs +++ b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs @@ -599,6 +599,7 @@ static void DrawDepthOfFieldQualitySetting(SerializedHDRenderPipelineAsset seria EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFResolution.GetArrayElementAtIndex(tier), Styles.resolutionQuality); EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFHighFilteringQuality.GetArrayElementAtIndex(tier), Styles.highQualityFiltering); + EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFPhysicallyBased.GetArrayElementAtIndex(tier), Styles.dofPhysicallyBased); --EditorGUI.indentLevel; } diff --git a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs index 7e6dcc327f8..c6ce192174c 100644 --- a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs +++ b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs @@ -14,6 +14,7 @@ class SerializedPostProcessingQualitySettings public SerializedProperty FarBlurMaxRadius; public SerializedProperty DoFResolution; public SerializedProperty DoFHighFilteringQuality; + public SerializedProperty DoFPhysicallyBased; // Motion Blur public SerializedProperty MotionBlurSampleCount; @@ -36,6 +37,7 @@ public SerializedPostProcessingQualitySettings(SerializedProperty root) FarBlurMaxRadius = root.Find((GlobalPostProcessingQualitySettings s) => s.FarBlurMaxRadius); DoFResolution = root.Find((GlobalPostProcessingQualitySettings s) => s.DoFResolution); DoFHighFilteringQuality = root.Find((GlobalPostProcessingQualitySettings s) => s.DoFHighQualityFiltering); + DoFPhysicallyBased = root.Find((GlobalPostProcessingQualitySettings s) => s.DoFPhysicallyBased); // Motion Blur MotionBlurSampleCount = root.Find((GlobalPostProcessingQualitySettings s) => s.MotionBlurSampleCount); diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs index 5969c50c4de..1961830c0bd 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs +++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs @@ -204,6 +204,23 @@ public bool highQualityFiltering set { m_HighQualityFiltering.value = value; } } + public bool physicallyBased + { + get + { + if (!UsesQualitySettings()) + { + return m_PhysicallyBased.value; + } + else + { + int qualityLevel = (int)quality.levelAndOverride.level; + return GetPostProcessingQualitySettings().DoFPhysicallyBased[qualityLevel]; + } + } + set { m_PhysicallyBased.value = value; } + } + /// /// Specifies the resolution at which HDRP processes the depth of field effect. 
/// @@ -257,6 +274,9 @@ public DepthOfFieldResolution resolution [SerializeField, FormerlySerializedAs("resolution")] DepthOfFieldResolutionParameter m_Resolution = new DepthOfFieldResolutionParameter(DepthOfFieldResolution.Half); + [SerializeField] + BoolParameter m_PhysicallyBased = new BoolParameter(false); + /// /// Tells if the effect needs to be rendered or not. /// diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/PostProcessSystem.cs b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/PostProcessSystem.cs index f95108fcf97..ea923f258af 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/PostProcessSystem.cs +++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/PostProcessSystem.cs @@ -601,6 +601,8 @@ void PoolSource(ref RTHandle src, RTHandle dst) } } + bool postDoFTAAEnabled = false; + // If Path tracing is enabled, then DoF is computed in the path tracer by sampling the lens aperure (when using the physical camera mode) bool isDoFPathTraced = (camera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) && camera.volumeStack.GetComponent().enable.value && @@ -614,11 +616,32 @@ void PoolSource(ref RTHandle src, RTHandle dst) using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfField))) { var destination = m_Pool.Get(Vector2.one, m_ColorFormat); - DoDepthOfField(cmd, camera, source, destination, taaEnabled); + if (!m_DepthOfField.physicallyBased) + DoDepthOfField(cmd, camera, source, destination, taaEnabled); + else + DoPhysicallyBasedDepthOfField(cmd, camera, source, destination, taaEnabled); PoolSource(ref source, destination); + + // When physically based DoF is enabled, TAA runs two times, first to stabilize the color buffer before DoF and then after DoF to accumulate more aperture samples + if (taaEnabled && m_DepthOfField.physicallyBased) + { + var taaDestination = m_Pool.Get(Vector2.one, m_ColorFormat); + bool postDof = true; + var taaParams = PrepareTAAParameters(camera, postDof); + + GrabTemporalAntialiasingHistoryTextures(camera, out var prevHistory, out var nextHistory, postDof); + DoTemporalAntialiasing(taaParams, cmd, source, taaDestination, motionVecTexture, depthBuffer, depthMipChain, prevHistory, nextHistory, prevMVLen:null, nextMVLen:null); + PoolSource(ref source, taaDestination); + postDoFTAAEnabled = true; + } } } + if (!postDoFTAAEnabled) + { + ReleasePostDoFTAAHistoryTextures(camera); + } + // Motion blur after depth of field for aesthetic reasons (better to see motion // blurred bokeh rather than out of focus motion blur) if (m_MotionBlur.IsActive() && m_AnimatedMaterialsEnabled && !camera.resetPostProcessingHistory && m_MotionBlurFS) @@ -1437,7 +1460,7 @@ struct TemporalAntiAliasingParameters public Vector4 taaFilterWeights; } - TemporalAntiAliasingParameters PrepareTAAParameters(HDCamera camera) + TemporalAntiAliasingParameters PrepareTAAParameters(HDCamera camera, bool PostDOF = false) { TemporalAntiAliasingParameters parameters = new TemporalAntiAliasingParameters(); @@ -1450,7 +1473,7 @@ TemporalAntiAliasingParameters PrepareTAAParameters(HDCamera camera) // The anti flicker becomes much more aggressive on higher values float temporalContrastForMaxAntiFlicker = 0.7f - Mathf.Lerp(0.0f, 0.3f, Mathf.SmoothStep(0.5f, 1.0f, camera.taaAntiFlicker)); - parameters.taaParameters = new Vector4(camera.taaHistorySharpening, Mathf.Lerp(minAntiflicker, maxAntiflicker, camera.taaAntiFlicker), motionRejectionMultiplier, temporalContrastForMaxAntiFlicker); + 
parameters.taaParameters = new Vector4(camera.taaHistorySharpening, PostDOF ? maxAntiflicker : Mathf.Lerp(minAntiflicker, maxAntiflicker, camera.taaAntiFlicker), motionRejectionMultiplier, temporalContrastForMaxAntiFlicker); // Precompute weights used for the Blackman-Harris filter. TODO: Note that these are slightly wrong as they don't take into account the jitter size. This needs to be fixed at some point. float crossWeights = Mathf.Exp(-2.29f * 2); @@ -1489,20 +1512,27 @@ TemporalAntiAliasingParameters PrepareTAAParameters(HDCamera camera) parameters.temporalAAMaterial.EnableKeyword("ENABLE_MV_REJECTION"); } - switch (camera.TAAQuality) + if (PostDOF) { - case HDAdditionalCameraData.TAAQualityLevel.Low: - parameters.temporalAAMaterial.EnableKeyword("LOW_QUALITY"); - break; - case HDAdditionalCameraData.TAAQualityLevel.Medium: - parameters.temporalAAMaterial.EnableKeyword("MEDIUM_QUALITY"); - break; - case HDAdditionalCameraData.TAAQualityLevel.High: - parameters.temporalAAMaterial.EnableKeyword("HIGH_QUALITY"); - break; - default: - parameters.temporalAAMaterial.EnableKeyword("MEDIUM_QUALITY"); - break; + parameters.temporalAAMaterial.EnableKeyword("POST_DOF"); + } + else + { + switch (camera.TAAQuality) + { + case HDAdditionalCameraData.TAAQualityLevel.Low: + parameters.temporalAAMaterial.EnableKeyword("LOW_QUALITY"); + break; + case HDAdditionalCameraData.TAAQualityLevel.Medium: + parameters.temporalAAMaterial.EnableKeyword("MEDIUM_QUALITY"); + break; + case HDAdditionalCameraData.TAAQualityLevel.High: + parameters.temporalAAMaterial.EnableKeyword("HIGH_QUALITY"); + break; + default: + parameters.temporalAAMaterial.EnableKeyword("MEDIUM_QUALITY"); + break; + } } parameters.taaHistoryPropertyBlock = m_TAAHistoryBlitPropertyBlock; @@ -1538,7 +1568,10 @@ static void DoTemporalAntialiasing(in TemporalAntiAliasingParameters taaParams, taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._CameraMotionVectorsTexture, motionVecTexture); taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._InputTexture, source); taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._InputHistoryTexture, prevHistory); - taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._InputVelocityMagnitudeHistory, prevMVLen); + if (prevMVLen != null) + { + taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._InputVelocityMagnitudeHistory, prevMVLen); + } taaParams.taaPropertyBlock.SetTexture(HDShaderIDs._DepthTexture, depthMipChain); @@ -1552,13 +1585,17 @@ static void DoTemporalAntialiasing(in TemporalAntiAliasingParameters taaParams, CoreUtils.SetRenderTarget(cmd, destination, depthBuffer); cmd.SetRandomWriteTarget(1, nextHistory); - cmd.SetRandomWriteTarget(2, nextMVLen); + if (nextMVLen != null) + { + cmd.SetRandomWriteTarget(2, nextMVLen); + } + cmd.DrawProcedural(Matrix4x4.identity, taaParams.temporalAAMaterial, 0, MeshTopology.Triangles, 3, 1, taaParams.taaPropertyBlock); cmd.DrawProcedural(Matrix4x4.identity, taaParams.temporalAAMaterial, 1, MeshTopology.Triangles, 3, 1, taaParams.taaPropertyBlock); cmd.ClearRandomWriteTargets(); } - void GrabTemporalAntialiasingHistoryTextures(HDCamera camera, out RTHandle previous, out RTHandle next) + void GrabTemporalAntialiasingHistoryTextures(HDCamera camera, out RTHandle previous, out RTHandle next, bool postDoF = false) { RTHandle Allocator(string id, int frameIndex, RTHandleSystem rtHandleSystem) { @@ -1569,9 +1606,12 @@ RTHandle Allocator(string id, int frameIndex, RTHandleSystem rtHandleSystem) ); } - next = camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.TemporalAntialiasing) 
- ?? camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.TemporalAntialiasing, Allocator, 2); - previous = camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.TemporalAntialiasing); + int historyType = (int)(postDoF ? + HDCameraFrameHistoryType.TemporalAntialiasingPostDoF : HDCameraFrameHistoryType.TemporalAntialiasing); + + next = camera.GetCurrentFrameRT(historyType) + ?? camera.AllocHistoryFrameRT(historyType, Allocator, 2); + previous = camera.GetPreviousFrameRT(historyType); } void GrabVelocityMagnitudeHistoryTextures(HDCamera camera, out RTHandle previous, out RTHandle next) @@ -1589,6 +1629,15 @@ RTHandle Allocator(string id, int frameIndex, RTHandleSystem rtHandleSystem) ?? camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.TAAMotionVectorMagnitude, Allocator, 2); previous = camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.TAAMotionVectorMagnitude); } + + void ReleasePostDoFTAAHistoryTextures(HDCamera camera) + { + var rt = camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.TemporalAntialiasingPostDoF); + if (rt != null) + { + camera.ReleaseHistoryFrameRT((int)HDCameraFrameHistoryType.TemporalAntialiasingPostDoF); + } + } #endregion #region Depth Of Field @@ -1765,21 +1814,8 @@ void DoDepthOfField(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandl if (taaEnabled) { - GrabCoCHistory(camera, out var prevCoCTex, out var nextCoCTex); - cocHistoryScale = new Vector2(camera.historyRTHandleProperties.rtHandleScale.z, camera.historyRTHandleProperties.rtHandleScale.w); - - cs = m_Resources.shaders.depthOfFieldCoCReprojectCS; - kernel = cs.FindKernel("KMain"); - cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(camera.resetPostProcessingHistory ? 0f : 0.91f, cocHistoryScale.x, cocHistoryScale.y, 0f)); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputCoCTexture, fullresCoC); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputHistoryCoCTexture, prevCoCTex); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputCoCTexture, nextCoCTex); - cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount); - - // Cleanup the main CoC texture as we don't need it anymore and use the - // re-projected one instead for the following steps - m_Pool.Recycle(fullresCoC); - fullresCoC = nextCoCTex; + bool useMips = false; + ReprojectCoCHistory(cmd, camera, useMips, ref fullresCoC); } m_HDInstance.PushFullScreenDebugTexture(camera, cmd, fullresCoC, FullScreenDebugMode.DepthOfFieldCoc); @@ -2201,21 +2237,48 @@ void DoDepthOfField(CommandBuffer cmd, HDCamera camera, RTHandle source, RTHandl m_Pool.Recycle(fullresCoC); // Already cleaned up if TAA is enabled } - static void GrabCoCHistory(HDCamera camera, out RTHandle previous, out RTHandle next) + static void GrabCoCHistory(HDCamera camera, out RTHandle previous, out RTHandle next, bool useMips = false) { RTHandle Allocator(string id, int frameIndex, RTHandleSystem rtHandleSystem) { return rtHandleSystem.Alloc( Vector2.one, TextureXR.slices, DepthBits.None, GraphicsFormat.R16_SFloat, - dimension: TextureXR.dimension, enableRandomWrite: true, useDynamicScale: true, name: $"{id} CoC History" + dimension: TextureXR.dimension, enableRandomWrite: true, useMipMap:useMips, useDynamicScale: true, name: $"{id} CoC History" ); } next = camera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.DepthOfFieldCoC) ?? 
camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthOfFieldCoC, Allocator, 2); + + if (useMips == true && next.rt.mipmapCount == 1) + { + camera.ReleaseHistoryFrameRT((int)HDCameraFrameHistoryType.DepthOfFieldCoC); + next = camera.AllocHistoryFrameRT((int)HDCameraFrameHistoryType.DepthOfFieldCoC, Allocator, 2); + } + previous = camera.GetPreviousFrameRT((int)HDCameraFrameHistoryType.DepthOfFieldCoC); } + void ReprojectCoCHistory(CommandBuffer cmd, HDCamera camera, bool useMips, ref RTHandle fullresCoC) + { + GrabCoCHistory(camera, out var prevCoCTex, out var nextCoCTex, useMips); + var cocHistoryScale = new Vector2(camera.historyRTHandleProperties.rtHandleScale.z, camera.historyRTHandleProperties.rtHandleScale.w); + + //Note: this reprojection creates some ghosting, we should replace it with something based on the new TAA + ComputeShader cs = m_Resources.shaders.depthOfFieldCoCReprojectCS; + int kernel = cs.FindKernel("KMain"); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(camera.resetPostProcessingHistory ? 0f : 0.91f, cocHistoryScale.x, cocHistoryScale.y, 0f)); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputCoCTexture, fullresCoC); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputHistoryCoCTexture, prevCoCTex); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputCoCTexture, nextCoCTex); + cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount); + + // Cleanup the main CoC texture as we don't need it anymore and use the + // re-projected one instead for the following steps + m_Pool.Recycle(fullresCoC); + fullresCoC = nextCoCTex; + } + #endregion #region Depth Of Field (Physically based) @@ -2229,7 +2292,7 @@ void DoPhysicallyBasedDepthOfField(CommandBuffer cmd, HDCamera camera, RTHandle // Map the old "max radius" parameters to a bigger range, so we can work on more challenging scenes float maxRadius = Mathf.Max(m_DepthOfField.farMaxBlur, m_DepthOfField.nearMaxBlur); - float cocLimit = Mathf.Clamp(2 * maxRadius, 1, 32); + float cocLimit = Mathf.Clamp(4 * maxRadius, 1, 32); ComputeShader cs; int kernel; @@ -2271,11 +2334,18 @@ void DoPhysicallyBasedDepthOfField(CommandBuffer cmd, HDCamera camera, RTHandle float nearStart = Mathf.Min(m_DepthOfField.nearFocusStart.value, nearEnd - 1e-5f); float farStart = Mathf.Max(m_DepthOfField.farFocusStart.value, nearEnd); float farEnd = Mathf.Max(m_DepthOfField.farFocusEnd.value, farStart + 1e-5f); - cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(nearStart, nearEnd, farStart, farEnd)); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(farStart, nearEnd, 1.0f / (farEnd - farStart), 1.0f / (nearStart - nearEnd))); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params2, new Vector4(m_DepthOfField.nearMaxBlur, m_DepthOfField.farMaxBlur, 0, 0)); } cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, fullresCoC); cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount); + + if (taaEnabled) + { + bool useMips = true; + ReprojectCoCHistory(cmd, camera, useMips, ref fullresCoC); + } } using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldPyramid))) @@ -2313,7 +2383,8 @@ void DoPhysicallyBasedDepthOfField(CommandBuffer cmd, HDCamera camera, RTHandle cmd.DispatchCompute(cs, kernel, (camera.actualWidth + 7) / 8, (camera.actualHeight + 7) / 8, camera.viewCount); } - m_Pool.Recycle(fullresCoC); + if (!taaEnabled) + 
m_Pool.Recycle(fullresCoC); } #endregion diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCircleOfConfusion.compute b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCircleOfConfusion.compute index 81e5829696a..8981d33ab2f 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCircleOfConfusion.compute +++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCircleOfConfusion.compute @@ -10,8 +10,16 @@ CBUFFER_START(cb0) float4 _Params; +float4 _Params2; CBUFFER_END +#define FarStart _Params.x +#define NearEnd _Params.y +#define FarRange _Params.z // 1 / (FarEnd - FarStart) +#define NearRange _Params.w // 1 / (NearStart - NearEnd) +#define NearMaxRadius _Params2.x +#define FarMaxRadius _Params2.y + // outpute texture RW_TEXTURE2D_X(float, _OutputTexture); @@ -50,7 +58,19 @@ void KMainCoCManual(uint3 dispatchThreadId : SV_DispatchThreadID) { UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); - //TODO + float depth = LoadCameraDepth(dispatchThreadId.xy); + // Note: we can avoid explicit linearization by merging it with the other computations + float linearEyeDepth = LinearEyeDepth(depth, _ZBufferParams); + + float CoC = 0; + if (linearEyeDepth > FarStart) + { + CoC = FarMaxRadius * saturate((linearEyeDepth - FarStart) * FarRange); + } + else if (linearEyeDepth < NearEnd) + { + CoC = - NearMaxRadius * saturate((linearEyeDepth - NearEnd) * NearRange); + } - _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = 0.0f; + _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = CoC; } diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute index 072d3173169..0eaa2ab3cc9 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute +++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute @@ -13,6 +13,10 @@ CBUFFER_START(cb0) float4 _Params; CBUFFER_END +#define NumRings _Params.x +#define MaxCoCRadius _Params.y +#define MaxCoCMipLevel _Params.z + // Input textures TEXTURE2D_X(_InputTexture); TEXTURE2D_X(_InputCoCTexture); @@ -22,24 +26,20 @@ RW_TEXTURE2D_X(CTYPE, _OutputTexture); // A set of Defines to fine-tune the algorithm #define NUM_BUCKETS 3 -#define COC_FAR_CLAMPING -#define CENTER_DENSITY -#define RING_DENSITY 8.0 -#define UNIFORM_WEIGHTS -//#define ADAPTIVE_RADIUS #define GRADIENT_NOISE +#define RING_DENSITY 8.0 +#define ADAPTIVE_RADIUS //#define OCTAWEB_SORTING +//#define UNIFORM_WEIGHTS -// DO NOT EDIT: Helper defines so we can have one code path when OCTAWEB_SORTING is enabled or disabled. +// Helper defines so we can have one code path when OCTAWEB_SORTING is enabled or disabled. 
#ifndef OCTAWEB_SORTING #define RING_COLOR totalColor #define RING_MAXCOC maxCoC - #define RING_HITS totalHits #define RING_ALPHA totalAlpha #else #define RING_COLOR ringColor #define RING_MAXCOC ringMaxCoC - #define RING_HITS ringHits #define RING_ALPHA ringAlpha #endif @@ -78,18 +78,33 @@ float GetCoCRadius(int2 positionSS, out int bucketIndex) { float CoCRadius = LOAD_TEXTURE2D_X(_InputCoCTexture, positionSS).x; bucketIndex = GetCoCBucket(CoCRadius); - return abs(CoCRadius); + return CoCRadius; } float GetCoCMaxRadius(int2 positionSS) { #ifndef ADAPTIVE_RADIUS - return _Params.y; + return MaxCoCRadius; #else // We only have up to 6 mip levels - int lod = min(6, _Params.z); - float maxOverlapingCoC = LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, positionSS >> lod, lod).x; - return abs(maxOverlapingCoC); + int lod = min(6, MaxCoCMipLevel); + uint4 size; + _InputCoCTexture.GetDimensions(lod, size.x, size.y, size.z, size.w); + + // Take RTHandleScale into account and odd texture dimension sizes (it's not enough to do a positionSS >> lod) + uint2 coords = positionSS * _ScreenSize.zw * size.xy * _RTHandleScale.xy; + + // Find the max CoC that is overlapping this pixel by sampling the max neighborhood + float maxOverlapingCoC = abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords, lod).x); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (1, 0), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (0, 1), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (1, 1), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (-1, 0), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (0, -1), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (-1, -1), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (-1, 1), lod).x)); + maxOverlapingCoC = max(maxOverlapingCoC, abs(LOAD_TEXTURE2D_X_LOD(_InputCoCTexture, coords + uint2 (1, -1), lod).x)); + return maxOverlapingCoC; #endif } @@ -99,12 +114,9 @@ float GetSampleWeight(float cocRadius) return 1.0f; #endif - if (cocRadius == 0.0) return 0.0; - float pixelRadius = 0.7071f; - float singlePixelArea = PI * pixelRadius * pixelRadius; - float diskArea = PI * cocRadius * cocRadius; - return diskArea >= singlePixelArea ? 
rcp(diskArea) : rcp(singlePixelArea); + float radius = max(pixelRadius, cocRadius); + return rcp(PI * radius * radius); } float2 PointInCircle(float angle) @@ -112,26 +124,34 @@ float2 PointInCircle(float angle) return float2(cos(angle), sin(angle)); } -float GetNumSamples(float radius) +float GetNumSamples(float radius, float maxRadius) { - //TODO: needs to take into account the density push towards the center (when enabled) - float numRings = _Params.x; - float maxRadius = _Params.y; - - float dR = maxRadius / numRings; + float dR = maxRadius / NumRings; float rings = floor(radius / dR); float seriesSum = 0.5 * rings * (rings + 1); return 1.0 + seriesSum * RING_DENSITY; } +float GetRingWeight(int index, float dR) +{ + float ringRadius = index * dR; + float ringArea = PI * ringRadius * ringRadius; + + float prevRingRadius = max(0, index - 1) * dR; + float prevRingArea = PI * prevRingRadius * prevRingRadius; + + ringArea = ringArea - prevRingArea; + + float ringSamples = RING_DENSITY * index; + ringSamples = (index == 1) ? ringSamples + 1.0f : ringSamples; + return ringArea * rcp (ringSamples); +} + [numthreads(GROUP_RES, GROUP_RES, 1)] void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) { PositionInputs posInputs = GetPositionInput(float2(dispatchThreadId.xy), _ScreenSize.zw, uint2(GROUP_RES, GROUP_RES)); - - CTYPE centerColor = LOAD_TEXTURE2D_X(_InputTexture, posInputs.positionSS).CTYPE_SWIZZLE; int bucketIndex = 0; - float centerCoc = GetCoCRadius(posInputs.positionSS, bucketIndex); // Bucket 0 : far focus region // Bucket 1 : in focus region @@ -150,60 +170,42 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) totalColor[i] = 0.0f; maxCoC[i] = 0.0f; prevRingWeight[i] = 0.0f; - totalHits[i] = 0.0f; #ifdef ENABLE_ALPHA totalAlpha[i] = 0.0f; #endif } - // Record the central sample - { - float weight = GetSampleWeight(centerCoc); - totalColor[bucketIndex].xyz = centerColor.xyz * weight; - totalColor[bucketIndex].w = weight; - maxCoC[bucketIndex] = centerCoc; - prevRingWeight[bucketIndex] = weight; -#ifdef ENABLE_ALPHA - totalAlpha[bucketIndex] = centerColor.w; -#endif - } - - float numRings = _Params.x; - float maxRadius = GetCoCMaxRadius(posInputs.positionSS); + float maxRadius = GetCoCMaxRadius(posInputs.positionSS); - float dR = maxRadius * rcp(numRings); + float dR = maxRadius * rcp(NumRings); float stratum = fmod(posInputs.positionSS.x + posInputs.positionSS.y, 2.0f); - // Note: gradient noise seems to give worse results than s - float noise = InterleavedGradientNoise(posInputs.positionSS.xy, 0); - float noise2 = InterleavedGradientNoise(posInputs.positionSS.xy, 9); + + int sampleOffset = _TaaFrameInfo.w != 0.0 ? 
_TaaFrameInfo.z : 0; + float noise = InterleavedGradientNoise(posInputs.positionSS.xy, sampleOffset); + float noise2 = InterleavedGradientNoise(posInputs.positionSS.xy, 8 + sampleOffset); // Iterate over the octaweb pattern and gather the DoF samples - for (float ring = 1; ring <= numRings; ring += 1.0) + for (float ring = NumRings; ring >= 0; ring -= 1.0) { - float scaledRad = dR; - -#ifdef CENTER_DENSITY - // make the samples more dense at the center - scaledRad = lerp(0.5 * dR, dR, ring * rcp(numRings)); -#endif - - float dAng = 2.0f * PI / (ring * RING_DENSITY); + float numSamples = max(ring * RING_DENSITY, 1); + float dAng = 2.0f * PI / numSamples; - // for high sample counts, checkerboarding looks better #ifndef GRADIENT_NOISE - float radius = ring * scaledRad + stratum * scaledRad; + float radius = ring * dR + stratum * dR; float ringOffset = 0.5 * fmod(ring, 2.0f) * dAng; #else - float radius = ring * scaledRad + noise2 * scaledRad; + float radius = (ring - 1) * dR + noise2 * dR; float ringOffset = noise * dAng; #endif + float ringWeight = GetRingWeight(max(1, ring), dR); + #ifdef OCTAWEB_SORTING float4 ringColor[NUM_BUCKETS]; float ringMaxCoC[NUM_BUCKETS]; float ringHits[NUM_BUCKETS]; #ifdef ENABLE_ALPHA - float4 ringAlpha[NUM_BUCKETS]; + float ringAlpha[NUM_BUCKETS]; #endif for (int i = 0; i < NUM_BUCKETS; ++i) @@ -226,127 +228,56 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) CTYPE sampleColor = LOAD_TEXTURE2D_X(_InputTexture, sampleTC).CTYPE_SWIZZLE; float sampleCoC = GetCoCRadius(sampleTC, sampleBucket); -#ifdef COC_FAR_CLAMPING - if (sampleBucket < 1) - sampleCoC = clamp(sampleCoC, 0, centerCoc); -#endif - - if (sampleCoC > radius) + if (abs(sampleCoC) >= radius) { - float weight = GetSampleWeight(sampleCoC); - RING_COLOR[sampleBucket].xyz += sampleColor.xyz * weight; - RING_COLOR[sampleBucket].w += weight; - RING_MAXCOC[sampleBucket] = max(RING_MAXCOC[sampleBucket], sampleCoC); - RING_HITS[sampleBucket] += 1.0; -#ifdef ENABLE_ALPHA - RING_ALPHA[sampleBucket] = sampleColor.w * weight; + float weight = ringWeight * GetSampleWeight(abs(sampleCoC)); + RING_COLOR[sampleBucket] += float4(sampleColor.xyz * weight, weight); + RING_MAXCOC[sampleBucket] = max(RING_MAXCOC[sampleBucket], abs(sampleCoC)); +#ifdef OCTAWEB_SORTING + ringHits[sampleBucket] += 1.0; #endif - } -#ifdef UNIFORM_WEIGHTS - else - { - RING_COLOR[sampleBucket].xyz += RING_COLOR[sampleBucket].xyz / RING_COLOR[sampleBucket].w; - RING_COLOR[sampleBucket].w += 1.0f; #ifdef ENABLE_ALPHA - RING_ALPHA[sampleBucket] += RING_ALPHA[sampleBucket] / RING_COLOR[sampleBucket].w; + RING_ALPHA[sampleBucket] = sampleColor.w * weight; #endif } -#endif } #ifdef OCTAWEB_SORTING - // Far bucket - { - totalColor[0] += RING_COLOR[0]; - maxCoC[0] = max(maxCoC[0], RING_MAXCOC[0]); - totalHits[0] += RING_HITS[0]; -#ifdef ENABLE_ALPHA - totalAlpha[0] += RING_ALPHA[0]; -#endif - } - - // In-focus bucket - { - totalColor[1] += RING_COLOR[1]; - maxCoC[1] = max(maxCoC[1], RING_MAXCOC[1]); - totalHits[1] += RING_HITS[1]; -#ifdef ENABLE_ALPHA - totalAlpha[1] += RING_ALPHA[1]; -#endif - } - - // Near bucket + for (int j = 0; j < NUM_BUCKETS; ++j) { - float currentAvg = RING_COLOR[2].w * rcp(ring * RING_DENSITY); - float prevAvg = prevRingWeight[2] * ((ring > 1) ? rcp((ring - 1.0) * RING_DENSITY) : 1); + float currentAvg = RING_COLOR[j].w * rcp(ring * RING_DENSITY); + float prevAvg = (ring < numRings) ? 
prevRingWeight[j] : currentAvg; float occlusion = saturate(prevAvg - currentAvg); - //float alpha = saturate(RING_COLOR[2].w * rcp(GetNumSamples(RING_MAXCOC[2])) * rcp(getSampleWeight(RING_MAXCOC[2]))); - float alpha = ringHits[2] * rcp(ring * RING_DENSITY); - - //totalColor[2] += RING_COLOR[2]; - maxCoC[2] = max(maxCoC[2], RING_MAXCOC[2]); - totalHits[2] += RING_HITS[2]; + float alpha = ringHits[j] * rcp(ring * RING_DENSITY); float blendFactor = 1.0 - alpha * occlusion; - totalColor[2] = blendFactor * totalColor[2] + RING_COLOR[2]; - prevRingWeight[2] = ringColor[2].w; + totalColor[j] = blendFactor * totalColor[j] + RING_COLOR[j]; + prevRingWeight[j] = currentAvg; #ifdef ENABLE_ALPHA - totalAlpha[2] += blendFactor * totalAlpha[2] + RING_ALPHA[2]; + totalAlpha[j] += blendFactor * totalAlpha[j] + RING_ALPHA[j]; #endif } #endif } - // Now compute the final color by combining the near, far and in-focus buckets with proper blending - - // Far range - float3 outColor = totalColor[0].xyz; - float totalW = totalColor[0].w; -#ifdef ENABLE_ALPHA - float outAlpha = totalAlpha[0]; -#endif - -#ifndef UNIFORM_WEIGHTS - // In focus range - { - // TODO: we might need to re-investigate the normaziation here, there is banding - float alpha = saturate(totalColor[1].w * rcp(GetNumSamples(maxCoC[1])) * rcp(GetSampleWeight(maxCoC[1]))); - outColor.xyz = (1.0 - alpha) * outColor.xyz + totalColor[1].xyz; - totalW = (1.0 - alpha) * totalW + totalColor[1].w; -#ifdef ENABLE_ALPHA - outAlpha = (1.0 - alpha) * outAlpha + totalAlpha[1]; -#endif - } - if (totalW > 0) outColor.xyz /= totalW; - - // Near range - float alpha = saturate(3 * totalHits[2] * rcp(GetNumSamples(maxCoC[2]))); - float3 srcColor = totalColor[2].w > 0 ? totalColor[2].xyz / totalColor[2].w : 0.0f; - outColor.xyz = (1.0 - alpha) * outColor.xyz + alpha * srcColor; + float4 outColor = 0; #ifdef ENABLE_ALPHA - float srcAlpha = totalColor[2].w > 0 ? 
totalAlpha[2] / totalColor[2].w : 0.0f; - outAlpha = (1.0 - alpha) * outAlpha + alpha * srcAlpha; - + float outAlpha = 0; #endif -#else - // back to front alpha blending of the other buckets - for (int j = 1; j < NUM_BUCKETS; ++j) + // back to front alpha blending of the near, far and in-focus buckets + for (int j = 0; j < NUM_BUCKETS; ++j) { - // TODO: we might need to re-investigate the normaziation here, there is banding - float alpha = saturate(totalColor[j].w * rcp(GetNumSamples(maxCoC[j])) * rcp(GetSampleWeight(maxCoC[j]))); - - outColor.xyz = (1.0 - alpha) * outColor.xyz + totalColor[j].xyz; - totalW = (1.0 - alpha) * totalW + totalColor[j].w; + float alpha = saturate(totalColor[j].w * rcp(GetNumSamples(maxCoC[j], maxRadius)) * rcp(GetSampleWeight(maxCoC[j]))); + outColor = (1.0 - alpha) * outColor + alpha * totalColor[j]; #ifdef ENABLE_ALPHA - outAlpha = (1.0 - alpha) * outAlpha + totalAlpha[j]; + outAlpha = (1.0 - alpha) * outAlpha + alpha * totalAlpha[j]; #endif } - outColor.xyz = outColor.xyz * rcp(totalW); -#endif + outColor.xyz = outColor.xyz * rcp(outColor.w); #ifdef ENABLE_ALPHA - _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = float4(outColor.xyz, outAlpha * rcp(totalW)); + _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = float4(outColor.xyz, outAlpha * rcp(outColor.w)); #else _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = outColor.xyz; #endif diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntiAliasing.shader b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntiAliasing.shader index b88b6224d8d..907473c39e8 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntiAliasing.shader +++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntiAliasing.shader @@ -14,7 +14,7 @@ Shader "Hidden/HDRP/TemporalAA" #pragma multi_compile_local _ FORCE_BILINEAR_HISTORY #pragma multi_compile_local _ ENABLE_MV_REJECTION #pragma multi_compile_local _ ANTI_RINGING - #pragma multi_compile_local LOW_QUALITY MEDIUM_QUALITY HIGH_QUALITY + #pragma multi_compile_local LOW_QUALITY MEDIUM_QUALITY HIGH_QUALITY POST_DOF #pragma only_renderers d3d11 playstation xboxone vulkan metal switch @@ -69,6 +69,19 @@ Shader "Hidden/HDRP/TemporalAA" #define PERCEPTUAL_SPACE 1 #define PERCEPTUAL_SPACE_ONLY_END 0 && (PERCEPTUAL_SPACE == 0) +#elif defined(POST_DOF) + #define YCOCG 1 + #define HISTORY_SAMPLING_METHOD BILINEAR + #define WIDE_NEIGHBOURHOOD 0 + #define NEIGHBOUROOD_CORNER_METHOD VARIANCE + #define CENTRAL_FILTERING NO_FILTERINGs + #define HISTORY_CLIP DIRECT_CLIP + #define ANTI_FLICKER 1 + #define ANTI_FLICKER_MV_DEPENDENT 1 + #define VELOCITY_REJECTION (defined(ENABLE_MV_REJECTION) && 0) + #define PERCEPTUAL_SPACE 1 + #define PERCEPTUAL_SPACE_ONLY_END 0 && (PERCEPTUAL_SPACE == 0) + #endif @@ -85,9 +98,10 @@ Shader "Hidden/HDRP/TemporalAA" #define _SpeedRejectionIntensity _TaaPostParameters.z #define _ContrastForMaxAntiFlicker _TaaPostParameters.w - +#if VELOCITY_REJECTION TEXTURE2D_X(_InputVelocityMagnitudeHistory); RW_TEXTURE2D_X(float, _OutputVelocityMagnitudeHistory); +#endif float4 _TaaPostParameters; float4 _TaaHistorySize; diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDAdditionalCameraData.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDAdditionalCameraData.cs index a9667339265..fe2b89f1c15 100644 --- 
a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDAdditionalCameraData.cs +++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDAdditionalCameraData.cs @@ -13,7 +13,7 @@ public class HDPhysicalCamera /// /// The minimum allowed aperture. /// - public const float kMinAperture = 1f; + public const float kMinAperture = 0.7f; /// /// The maximum allowed aperture. diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCameraFrameHistoryType.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCameraFrameHistoryType.cs index 08a16af16a0..95c52429323 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCameraFrameHistoryType.cs +++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCameraFrameHistoryType.cs @@ -41,6 +41,8 @@ public enum HDCameraFrameHistoryType RayTracedSubSurface, /// Path tracing buffer. PathTracing, + /// Temporal antialiasing history after DoF. + TemporalAntialiasingPostDoF, /// Number of history buffers. Count } diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs index f77d6923019..2710fc3a8c3 100644 --- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs +++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs @@ -122,6 +122,8 @@ internal GlobalPostProcessingQualitySettings() public DepthOfFieldResolution[] DoFResolution = new DepthOfFieldResolution[s_QualitySettingCount]; /// Use Depth of field high quality filtering for each quality level. public bool[] DoFHighQualityFiltering = new bool[s_QualitySettingCount]; + /// Use Depth of field high physically based setting for each quality level. + public bool[] DoFPhysicallyBased = new bool[s_QualitySettingCount]; /* Motion Blur */ /// Motion Blur sample count for each quality level.
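
Usage note for reviewers: the documentation row and the editor InfoBox above recommend pairing the new option with TAA. The sketch below is a minimal, illustrative way to do both from script on an existing Volume profile. It assumes the script sits on the HDRP camera and that the profile already contains a Depth Of Field override; only `DepthOfField.physicallyBased` comes from this change, everything else is the standard Volume / HDRP camera API.

```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.HighDefinition;

public class EnablePhysicalDoF : MonoBehaviour
{
    // Assign a Volume profile that already has a Depth Of Field override.
    public VolumeProfile profile;

    void Start()
    {
        // Turn on the new physically based path added by this change.
        // As with the other DoF settings, the parameter's override toggle in the
        // profile still controls whether the Volume system applies the value.
        if (profile != null && profile.TryGet<DepthOfField>(out var dof))
            dof.physicallyBased = true;

        // The new path accumulates aperture samples over time, so TAA is strongly recommended.
        var cameraData = GetComponent<HDAdditionalCameraData>();
        if (cameraData != null)
            cameraData.antialiasing = HDAdditionalCameraData.AntialiasingMode.TemporalAntialiasing;
    }
}
```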
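To make the repacked `_Params` easier to review: `KMainCoCManual` now derives a signed CoC radius directly from linear eye depth, positive in the far field and negative in the near field, using the reciprocal ranges packed in `DoPhysicallyBasedDepthOfField`. Below is a plain C# restatement of that mapping; it is illustrative only, and the method and parameter names are local to this sketch.

```csharp
using UnityEngine;

static class CoCManualSketch
{
    // Mirrors the packing done in DoPhysicallyBasedDepthOfField:
    // _Params  = (farStart, nearEnd, 1 / (farEnd - farStart), 1 / (nearStart - nearEnd))
    // _Params2 = (nearMaxBlur, farMaxBlur, 0, 0)
    public static float EvaluateCoC(float linearEyeDepth,
                                    float nearStart, float nearEnd,
                                    float farStart, float farEnd,
                                    float nearMaxRadius, float farMaxRadius)
    {
        float farRange  = 1.0f / (farEnd - farStart);
        float nearRange = 1.0f / (nearStart - nearEnd);

        if (linearEyeDepth > farStart)   // far field: radius grows from 0 up to farMaxRadius
            return farMaxRadius * Mathf.Clamp01((linearEyeDepth - farStart) * farRange);
        if (linearEyeDepth < nearEnd)    // near field: negative radius, grows towards -nearMaxRadius
            return -nearMaxRadius * Mathf.Clamp01((linearEyeDepth - nearEnd) * nearRange);
        return 0.0f;                     // in-focus band
    }
}
```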
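The reworked gather kernel replaces the old per-bucket heuristics with area-based weights: each sample is normalized by the area of its CoC disk (`GetSampleWeight`), and each octaweb ring contributes the area of its annulus spread over its sample count (`GetRingWeight`), which is what keeps brightness roughly energy-conserving as the CoC varies. The sketch below restates those two functions in plain C#, purely as a reviewing aid; it is not engine code.

```csharp
using UnityEngine;

static class DoFGatherWeightsSketch
{
    const float RingDensity = 8.0f;      // matches RING_DENSITY in DoFGather.compute
    const float PixelRadius = 0.7071f;   // half-diagonal of a pixel, as in GetSampleWeight

    // A sample's contribution falls off with the area of its CoC disk,
    // clamped so sub-pixel CoCs still weigh like a single pixel.
    public static float SampleWeight(float cocRadius)
    {
        float radius = Mathf.Max(PixelRadius, cocRadius);
        return 1.0f / (Mathf.PI * radius * radius);
    }

    // Ring i covers an annulus of area pi * (r_i^2 - r_{i-1}^2) with
    // RING_DENSITY * i samples (ring 1 also accounts for the centre sample),
    // so its weight is the annulus area spread over its samples.
    public static float RingWeight(int ringIndex, float dR)
    {
        float ringRadius = ringIndex * dR;
        float prevRadius = Mathf.Max(0, ringIndex - 1) * dR;
        float ringArea = Mathf.PI * (ringRadius * ringRadius - prevRadius * prevRadius);

        float ringSamples = RingDensity * ringIndex;
        if (ringIndex == 1)
            ringSamples += 1.0f;
        return ringArea / ringSamples;
    }
}
```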