diff --git a/com.unity.render-pipelines.high-definition/CHANGELOG.md b/com.unity.render-pipelines.high-definition/CHANGELOG.md
index fde54493b80..a8274c75983 100644
--- a/com.unity.render-pipelines.high-definition/CHANGELOG.md
+++ b/com.unity.render-pipelines.high-definition/CHANGELOG.md
@@ -83,6 +83,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Added the support of volumetric clouds for baked and realtime reflection probes.
- Added a property to control the fallback of the last bounce of a RTGI, RTR, RR ray to keep a previously existing side effect on user demand (case 1350590).
- Added a parameter to control the vertical shape offset of the volumetric clouds (case 1358528).
+- Added an option to render screen space global illumination at half resolution to achieve real-time compatible performance at high resolutions (case 1353727).
### Fixed
- Fixed Intensity Multiplier not affecting realtime global illumination.
diff --git a/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md b/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
index c81f68d8931..a96db2acad0 100644
--- a/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
+++ b/com.unity.render-pipelines.high-definition/Documentation~/Override-Screen-Space-GI.md
@@ -45,6 +45,7 @@ HDRP uses the [Volume](Volumes.md) framework to calculate SSGI, so to enable and
| - **Half Resolution Denoiser** | Enable this feature to evaluate the spatio-temporal filter in half resolution. This decreases the resource intensity of denoising but reduces quality. |
| - **Denoiser Radius** | Set the radius of the spatio-temporal filter. |
| - **Second Denoiser Pass** | Enable this feature to process a second denoiser pass. This helps to remove noise from the effect. |
+| **Full Resolution** | Enable this feature to increase the ray budget to one ray per pixel, per frame. Disable this feature to decrease the ray budget to one ray per four pixels, per frame. |
| **Depth Tolerance** | Use the slider to control the tolerance when comparing the depth of the GameObjects on screen and the depth buffer. Because the SSR algorithm can not distinguish thin GameObjects from thick ones, this property helps trace rays behind GameObjects. The algorithm applies this property to every GameObject uniformly. |
| **Ray Miss** | Determines what HDRP does when a screen space global illumination (SSGI) ray doesn't find an intersection. Choose from one of the following options:
•**Reflection probes**: HDRP uses reflection probes in your scene to calculate the missing SSGI intersection.
•**Sky**: HDRP uses the sky defined by the current [Volume](Volumes.md) settings to calculate the missing SSGI intersection.
•**Both**: HDRP uses both reflection probes and the sky defined by the current [Volume](Volumes.md) settings to calculate the missing SSGI intersection.
•**Nothing**: HDRP does not calculate indirect lighting when SSGI doesn't find an intersection.
This property is set to **Both** by default. |
diff --git a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Raytracing/GlobalIlluminationEditor.cs b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Raytracing/GlobalIlluminationEditor.cs
index d15d7334af4..948a040ff67 100644
--- a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Raytracing/GlobalIlluminationEditor.cs
+++ b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Raytracing/GlobalIlluminationEditor.cs
@@ -15,6 +15,7 @@ class GlobalIlluminatorEditor : VolumeComponentWithQualityEditor
SerializedDataParameter m_RayMiss;
// Screen space global illumination parameters
+ SerializedDataParameter m_FullResolutionSS;
SerializedDataParameter m_DepthBufferThickness;
SerializedDataParameter m_RaySteps;
@@ -60,6 +61,7 @@ public override void OnEnable()
m_RayMiss = Unpack(o.Find(x => x.rayMiss));
// SSGI Parameters
+ m_FullResolutionSS = Unpack(o.Find(x => x.fullResolutionSS));
m_DepthBufferThickness = Unpack(o.Find(x => x.depthBufferThickness));
m_RaySteps = Unpack(o.Find(x => x.maxRaySteps));
@@ -98,6 +100,7 @@ public override void OnEnable()
}
static public readonly GUIContent k_RayLengthText = EditorGUIUtility.TrTextContent("Max Ray Length", "Controls the maximal length of global illumination rays. The higher this value is, the more expensive ray traced global illumination is.");
+ static public readonly GUIContent k_FullResolutionSSText = EditorGUIUtility.TrTextContent("Full Resolution", "Controls if the screen space global illumination should be evaluated at half resolution.");
static public readonly GUIContent k_DepthBufferThicknessText = EditorGUIUtility.TrTextContent("Depth Tolerance", "Controls the tolerance when comparing the depth of two pixels.");
static public readonly GUIContent k_RayMissFallbackHierarchyText = EditorGUIUtility.TrTextContent("Ray Miss", "Controls the fallback hierarchy for indirect diffuse in case the ray misses.");
static public readonly GUIContent k_LastBounceFallbackHierarchyText = EditorGUIUtility.TrTextContent("Last Bounce", "Controls the fallback hierarchy for lighting the last bounce.");
@@ -245,6 +248,7 @@ public override void OnInspectorGUI()
PropertyField(m_RaySteps);
DenoiserSSGUI();
}
+ PropertyField(m_FullResolutionSS, k_FullResolutionSSText);
PropertyField(m_DepthBufferThickness, k_DepthBufferThicknessText);
PropertyField(m_RayMiss, k_RayMissFallbackHierarchyText);
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/Lighting/GlobalIllumination.cs b/com.unity.render-pipelines.high-definition/Runtime/Lighting/GlobalIllumination.cs
index 3efa509db35..7da02e3aa4d 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/Lighting/GlobalIllumination.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/Lighting/GlobalIllumination.cs
@@ -49,6 +49,11 @@ bool UsesQualityMode()
displayName = "Screen Space Global Illumination";
}
+ ///
+ /// Defines if the screen space global illumination should be evaluated at full resolution.
+ ///
+ public BoolParameter fullResolutionSS = new BoolParameter(true);
+
///
/// The number of steps that should be used during the ray marching pass.
///
diff --git a/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/HDRenderPipeline.ScreenSpaceGlobalIllumination.cs b/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/HDRenderPipeline.ScreenSpaceGlobalIllumination.cs
index d29b10f3137..3d0cbc80939 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/HDRenderPipeline.ScreenSpaceGlobalIllumination.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/HDRenderPipeline.ScreenSpaceGlobalIllumination.cs
@@ -161,20 +161,18 @@ TextureHandle TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumi
{
builder.EnableAsyncCompute(false);
- //if (true)
+ if (giSettings.fullResolutionSS.value)
{
passData.texWidth = hdCamera.actualWidth;
passData.texHeight = hdCamera.actualHeight;
passData.halfScreenSize.Set(passData.texWidth * 0.5f, passData.texHeight * 0.5f, 2.0f / passData.texWidth, 2.0f / passData.texHeight);
}
- /*
else
{
passData.texWidth = hdCamera.actualWidth / 2;
passData.texHeight = hdCamera.actualHeight / 2;
passData.halfScreenSize.Set(passData.texWidth, passData.texHeight, 1.0f / passData.texWidth, 1.0f / passData.texHeight);
}
- */
passData.viewCount = hdCamera.viewCount;
// Set the generation parameters
@@ -189,8 +187,8 @@ TextureHandle TraceSSGI(RenderGraph renderGraph, HDCamera hdCamera, GlobalIllumi
// Grab the right kernel
passData.ssGICS = asset.renderPipelineResources.shaders.screenSpaceGlobalIlluminationCS;
- passData.traceKernel = true ? m_TraceGlobalIlluminationKernel : m_TraceGlobalIlluminationHalfKernel;
- passData.projectKernel = true ? m_ReprojectGlobalIlluminationKernel : m_ReprojectGlobalIlluminationHalfKernel;
+ passData.traceKernel = giSettings.fullResolutionSS.value ? m_TraceGlobalIlluminationKernel : m_TraceGlobalIlluminationHalfKernel;
+ passData.projectKernel = giSettings.fullResolutionSS.value ? m_ReprojectGlobalIlluminationKernel : m_ReprojectGlobalIlluminationHalfKernel;
BlueNoise blueNoise = GetBlueNoiseManager();
passData.ditheredTextureSet = blueNoise.DitheredTextureSet8SPP();
@@ -384,7 +382,7 @@ class ConvertSSGIPassData
public TextureHandle outputBuffer;
}
- TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool halfResolution,
+ TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool fullResolution,
TextureHandle depthPyramid, TextureHandle stencilBuffer, TextureHandle normalBuffer,
TextureHandle inputBuffer)
{
@@ -393,7 +391,7 @@ TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool halfR
builder.EnableAsyncCompute(false);
// Set the camera parameters
- if (!halfResolution)
+ if (fullResolution)
{
passData.texWidth = hdCamera.actualWidth;
passData.texHeight = hdCamera.actualHeight;
@@ -407,7 +405,7 @@ TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool halfR
// Grab the right kernel
passData.ssGICS = m_Asset.renderPipelineResources.shaders.screenSpaceGlobalIlluminationCS;
- passData.convertKernel = halfResolution ? m_ConvertSSGIHalfKernel : m_ConvertSSGIKernel;
+ passData.convertKernel = fullResolution ? m_ConvertSSGIKernel : m_ConvertSSGIHalfKernel;
passData.offsetBuffer = m_DepthBufferMipChainInfo.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer);
@@ -446,13 +444,13 @@ TextureHandle ConvertSSGI(RenderGraph renderGraph, HDCamera hdCamera, bool halfR
}
}
- TextureHandle DenoiseSSGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle rtGIBuffer, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationTexture)
+ TextureHandle DenoiseSSGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle rtGIBuffer, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectorBuffer, TextureHandle historyValidationTexture, bool fullResolution)
{
var giSettings = hdCamera.volumeStack.GetComponent();
if (giSettings.denoiseSS)
{
// Evaluate the history's validity
- float historyValidity0 = EvaluateIndirectDiffuseHistoryValidity0(hdCamera, true, false);
+ float historyValidity0 = EvaluateIndirectDiffuseHistoryValidity0(hdCamera, fullResolution, false);
HDTemporalFilter temporalFilter = GetTemporalFilter();
HDDiffuseDenoiser diffuseDenoiser = GetDiffuseDenoiser();
@@ -465,34 +463,47 @@ TextureHandle DenoiseSSGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHan
filterParams.occluderMotionRejection = false;
filterParams.receiverMotionRejection = false;
filterParams.exposureControl = true;
+ filterParams.fullResolution = fullResolution;
TextureHandle denoisedRTGI = temporalFilter.Denoise(renderGraph, hdCamera, filterParams, rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferHF, depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);
// Apply the diffuse denoiser
- rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadiusSS, halfResolutionFilter: giSettings.halfResolutionDenoiserSS, jitterFilter: giSettings.secondDenoiserPassSS, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
+ HDDiffuseDenoiser.DiffuseDenoiserParameters ddParams;
+ ddParams.singleChannel = false;
+ ddParams.kernelSize = giSettings.denoiserRadiusSS;
+ ddParams.halfResolutionFilter = giSettings.halfResolutionDenoiserSS;
+ ddParams.jitterFilter = giSettings.secondDenoiserPassSS;
+ ddParams.fullResolutionInput = fullResolution;
+ rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, ddParams, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
// If the second pass is requested, do it otherwise blit
if (giSettings.secondDenoiserPassSS)
{
- float historyValidity1 = EvaluateIndirectDiffuseHistoryValidity1(hdCamera, true, false);
+ float historyValidity1 = EvaluateIndirectDiffuseHistoryValidity1(hdCamera, fullResolution, false);
- // Run the temporal denoiser
+ // Run the temporal filter
TextureHandle historyBufferLF = renderGraph.ImportTexture(RequestIndirectDiffuseHistoryTextureLF(hdCamera));
filterParams.singleChannel = false;
filterParams.historyValidity = historyValidity1;
filterParams.occluderMotionRejection = false;
filterParams.receiverMotionRejection = false;
filterParams.exposureControl = true;
+ filterParams.fullResolution = fullResolution;
denoisedRTGI = temporalFilter.Denoise(renderGraph, hdCamera, filterParams, rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferLF, depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);
- // Apply the diffuse denoiser
- rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadiusSS * 0.5f, halfResolutionFilter: giSettings.halfResolutionDenoiserSS, jitterFilter: false, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
+ // Apply the diffuse filter
+ ddParams.singleChannel = false;
+ ddParams.kernelSize = giSettings.denoiserRadiusSS * 0.5f;
+ ddParams.halfResolutionFilter = giSettings.halfResolutionDenoiserSS;
+ ddParams.jitterFilter = false;
+ ddParams.fullResolutionInput = fullResolution;
+ rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, ddParams, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
// Propagate the history validity for the second buffer
- PropagateIndirectDiffuseHistoryValidity1(hdCamera, true, false);
+ PropagateIndirectDiffuseHistoryValidity1(hdCamera, fullResolution, false);
}
// Propagate the history validity for the first buffer
- PropagateIndirectDiffuseHistoryValidity0(hdCamera, true, false);
+ PropagateIndirectDiffuseHistoryValidity0(hdCamera, fullResolution, false);
return rtGIBuffer;
}
@@ -513,17 +524,16 @@ TextureHandle RenderSSGI(RenderGraph renderGraph, HDCamera hdCamera,
TextureHandle colorBuffer = TraceSSGI(renderGraph, hdCamera, giSettings, depthPyramid, normalBuffer, stencilBuffer, motionVectorsBuffer, lightList);
// Denoise the result
- TextureHandle denoisedSSGI = DenoiseSSGI(renderGraph, hdCamera, colorBuffer, depthPyramid, normalBuffer, motionVectorsBuffer, historyValidationTexture);
+ TextureHandle denoisedSSGI = DenoiseSSGI(renderGraph, hdCamera, colorBuffer, depthPyramid, normalBuffer, motionVectorsBuffer, historyValidationTexture, giSettings.fullResolutionSS.value);
// Convert back the result to RGB space
- colorBuffer = ConvertSSGI(renderGraph, hdCamera, false, depthPyramid, stencilBuffer, normalBuffer, denoisedSSGI);
+ colorBuffer = ConvertSSGI(renderGraph, hdCamera, giSettings.fullResolutionSS.value, depthPyramid, stencilBuffer, normalBuffer, denoisedSSGI);
- /*
// Upscale it if required
// If this was a half resolution effect, we still have to upscale it
- if (!giSettings.fullResolutionSS)
+ if (!giSettings.fullResolutionSS.value)
colorBuffer = UpscaleSSGI(renderGraph, hdCamera, giSettings, info, depthPyramid, colorBuffer);
- */
+
return colorBuffer;
}
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/ScreenSpaceGlobalIllumination.compute b/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/ScreenSpaceGlobalIllumination.compute
index 5c90715fb9d..8568248cad4 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/ScreenSpaceGlobalIllumination.compute
+++ b/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/ScreenSpaceGlobalIllumination.compute
@@ -78,21 +78,22 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
// Compute the pixel position to process
uint2 currentCoord = dispatchThreadId.xy;
+ uint2 inputCoord = dispatchThreadId.xy;
#if HALF_RES
// Compute the full resolution pixel for the inputs that do not have a pyramid
- currentCoord = currentCoord * 2;
+ inputCoord = inputCoord * 2;
#endif
// Read the depth value as early as possible
- float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
+ float deviceDepth = LOAD_TEXTURE2D_X(_DepthTexture, inputCoord).x;
// Initialize the hitpoint texture to a miss
- _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = float2(99.0, 0.0);
+ _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(currentCoord.xy)] = float2(99.0, 0.0);
// Read the pixel normal
NormalData normalData;
- DecodeFromNormalBuffer(currentCoord.xy, normalData);
+ DecodeFromNormalBuffer(inputCoord.xy, normalData);
// Generete a new direction to follow
float2 newSample;
@@ -108,7 +109,7 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
// If this is a background pixel, we flag the ray as a dead ray (we are also trying to keep the usage of the depth buffer the latest possible)
bool killRay = deviceDepth == UNITY_RAW_FAR_CLIP_VALUE;
// Convert this to a world space position (camera relative)
- PositionInputs posInput = GetPositionInput(currentCoord, _ScreenSize.zw, deviceDepth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);
+ PositionInputs posInput = GetPositionInput(inputCoord, _ScreenSize.zw, deviceDepth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);
// Compute the view direction (world space)
float3 viewWS = GetWorldSpaceNormalizeViewDir(posInput.positionWS);
@@ -129,7 +130,7 @@ void TRACE_GLOBAL_ILLUMINATION(uint3 dispatchThreadId : SV_DispatchThreadID, uin
// recompute it using the last value of 't', which would result in an overshoot.
// It also needs to be precisely at the center of the pixel to avoid artifacts.
float2 hitPositionNDC = floor(rayPos.xy) * _ScreenSize.zw + (0.5 * _ScreenSize.zw); // Should we precompute the half-texel bias? We seem to use it a lot.
- _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = hitPositionNDC;
+ _IndirectDiffuseHitPointTextureRW[COORD_TEXTURE2D_X(currentCoord.xy)] = hitPositionNDC;
}
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDDiffuseDenoiser.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDDiffuseDenoiser.cs
index 1e1a5b6524b..645cd1a1bd7 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDDiffuseDenoiser.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDDiffuseDenoiser.cs
@@ -50,6 +50,7 @@ class DiffuseDenoiserPassData
public bool halfResolutionFilter;
public bool jitterFilter;
public int frameIndex;
+ public bool fullResolutionInput;
// Kernels
public int bilateralFilterKernel;
@@ -66,7 +67,16 @@ class DiffuseDenoiserPassData
public TextureHandle outputBuffer;
}
- public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool singleChannel, float kernelSize, bool halfResolutionFilter, bool jitterFilter,
+ internal struct DiffuseDenoiserParameters
+ {
+ public bool singleChannel;
+ public float kernelSize;
+ public bool halfResolutionFilter;
+ public bool jitterFilter;
+ public bool fullResolutionInput;
+ }
+
+ public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, DiffuseDenoiserParameters denoiserParams,
TextureHandle noisyBuffer, TextureHandle depthBuffer, TextureHandle normalBuffer, TextureHandle outputBuffer)
{
using (var builder = renderGraph.AddRenderPass("DiffuseDenoiser", out var passData, ProfilingSampler.Get(HDProfileId.DiffuseFilter)))
@@ -76,20 +86,29 @@ public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool si
// Fetch all the resources
// Camera parameters
- passData.texWidth = hdCamera.actualWidth;
- passData.texHeight = hdCamera.actualHeight;
+ if (denoiserParams.fullResolutionInput)
+ {
+ passData.texWidth = hdCamera.actualWidth;
+ passData.texHeight = hdCamera.actualHeight;
+ }
+ else
+ {
+ passData.texWidth = hdCamera.actualWidth / 2;
+ passData.texHeight = hdCamera.actualHeight / 2;
+ }
passData.viewCount = hdCamera.viewCount;
// Denoising parameters
- passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
- passData.kernelSize = kernelSize;
- passData.halfResolutionFilter = halfResolutionFilter;
- passData.jitterFilter = jitterFilter;
+ passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, passData.texWidth, passData.texHeight);
+ passData.kernelSize = denoiserParams.kernelSize;
+ passData.halfResolutionFilter = denoiserParams.halfResolutionFilter;
+ passData.jitterFilter = denoiserParams.jitterFilter;
passData.frameIndex = m_RenderPipeline.RayTracingFrameIndex(hdCamera);
+ passData.fullResolutionInput = denoiserParams.fullResolutionInput;
// Kernels
- passData.bilateralFilterKernel = singleChannel ? m_BilateralFilterSingleKernel : m_BilateralFilterColorKernel;
- passData.gatherKernel = singleChannel ? m_GatherSingleKernel : m_GatherColorKernel;
+ passData.bilateralFilterKernel = denoiserParams.singleChannel ? m_BilateralFilterSingleKernel : m_BilateralFilterColorKernel;
+ passData.gatherKernel = denoiserParams.singleChannel ? m_GatherSingleKernel : m_GatherColorKernel;
// Other parameters
passData.owenScrambleRGBA = m_OwenScrambleRGBA;
@@ -123,6 +142,7 @@ public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool si
else
ctx.cmd.SetComputeIntParam(data.diffuseDenoiserCS, HDShaderIDs._JitterFramePeriod, -1);
+ CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_INPUT", data.fullResolutionInput);
ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.bilateralFilterKernel, numTilesX, numTilesY, data.viewCount);
if (data.halfResolutionFilter)
@@ -132,6 +152,7 @@ public TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, bool si
ctx.cmd.SetComputeTextureParam(data.diffuseDenoiserCS, data.gatherKernel, HDShaderIDs._DenoiseOutputTextureRW, data.outputBuffer);
ctx.cmd.DispatchCompute(data.diffuseDenoiserCS, data.gatherKernel, numTilesX, numTilesY, data.viewCount);
}
+ CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_INPUT", false);
});
return passData.outputBuffer;
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingAmbientOcclusion.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingAmbientOcclusion.cs
index 1156621c3f3..5ea9eb600bd 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingAmbientOcclusion.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingAmbientOcclusion.cs
@@ -178,13 +178,21 @@ TextureHandle DenoiseAO(RenderGraph renderGraph, HDCamera hdCamera, TraceAmbient
filterParams.occluderMotionRejection = aoSettings.occluderMotionRejection.value;
filterParams.receiverMotionRejection = aoSettings.receiverMotionRejection.value;
filterParams.exposureControl = false;
+ filterParams.fullResolution = true;
+
TextureHandle denoisedRTAO = GetTemporalFilter().Denoise(renderGraph, hdCamera, filterParams,
traceAOResult.signalBuffer, traceAOResult.velocityBuffer, historyBuffer,
depthBuffer, normalBuffer, motionVectorBuffer, historyValidationBuffer);
// Apply the diffuse denoiser
HDDiffuseDenoiser diffuseDenoiser = GetDiffuseDenoiser();
- return diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: true, kernelSize: aoSettings.denoiserRadius, halfResolutionFilter: false, jitterFilter: false, denoisedRTAO, depthBuffer, normalBuffer, traceAOResult.signalBuffer);
+ HDDiffuseDenoiser.DiffuseDenoiserParameters ddParams;
+ ddParams.singleChannel = true;
+ ddParams.kernelSize = aoSettings.denoiserRadius;
+ ddParams.halfResolutionFilter = false;
+ ddParams.jitterFilter = false;
+ ddParams.fullResolutionInput = true;
+ return diffuseDenoiser.Denoise(renderGraph, hdCamera, ddParams, denoisedRTAO, depthBuffer, normalBuffer, traceAOResult.signalBuffer);
}
else
return traceAOResult.signalBuffer;
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingIndirectDiffuse.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingIndirectDiffuse.cs
index 972ac8b8f84..c2c5134beb2 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingIndirectDiffuse.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingIndirectDiffuse.cs
@@ -529,19 +529,27 @@ TextureHandle DenoiseRTGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHan
filterParams.occluderMotionRejection = false;
filterParams.receiverMotionRejection = giSettings.receiverMotionRejection.value;
filterParams.exposureControl = true;
+ filterParams.fullResolution = true;
+
TextureHandle denoisedRTGI = temporalFilter.Denoise(renderGraph, hdCamera, filterParams,
rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferHF,
depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);
// Apply the diffuse denoiser
- rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadius, halfResolutionFilter: giSettings.halfResolutionDenoiser, jitterFilter: giSettings.secondDenoiserPass, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
+ HDDiffuseDenoiser.DiffuseDenoiserParameters ddParams;
+ ddParams.singleChannel = false;
+ ddParams.kernelSize = giSettings.denoiserRadius;
+ ddParams.halfResolutionFilter = giSettings.halfResolutionDenoiser;
+ ddParams.jitterFilter = giSettings.secondDenoiserPass;
+ ddParams.fullResolutionInput = true;
+ rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, ddParams, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
// If the second pass is requested, do it otherwise blit
if (giSettings.secondDenoiserPass)
{
float historyValidity1 = EvaluateIndirectDiffuseHistoryValidity1(hdCamera, fullResolution, true);
- // Run the temporal denoiser
+ // Run the temporal filter
TextureHandle historyBufferLF = renderGraph.ImportTexture(RequestIndirectDiffuseHistoryTextureLF(hdCamera));
filterParams.singleChannel = false;
filterParams.historyValidity = historyValidity1;
@@ -552,8 +560,13 @@ TextureHandle DenoiseRTGI(RenderGraph renderGraph, HDCamera hdCamera, TextureHan
rtGIBuffer, renderGraph.defaultResources.blackTextureXR, historyBufferLF,
depthPyramid, normalBuffer, motionVectorBuffer, historyValidationTexture);
- // Apply the diffuse denoiser
- rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, singleChannel: false, kernelSize: giSettings.denoiserRadius * 0.5f, halfResolutionFilter: giSettings.halfResolutionDenoiser, jitterFilter: false, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
+ // Apply the second diffuse filter
+ ddParams.singleChannel = false;
+ ddParams.kernelSize = giSettings.denoiserRadius * 0.5f;
+ ddParams.halfResolutionFilter = giSettings.halfResolutionDenoiser;
+ ddParams.jitterFilter = false;
+ ddParams.fullResolutionInput = true;
+ rtGIBuffer = diffuseDenoiser.Denoise(renderGraph, hdCamera, ddParams, denoisedRTGI, depthPyramid, normalBuffer, rtGIBuffer);
// Propagate the history validity for the second buffer
PropagateIndirectDiffuseHistoryValidity1(hdCamera, fullResolution, true);
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingSubsurfaceScattering.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingSubsurfaceScattering.cs
index bea8614475c..d28be6ac15d 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingSubsurfaceScattering.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRenderPipeline.RaytracingSubsurfaceScattering.cs
@@ -204,6 +204,7 @@ TextureHandle DenoiseRTSSS(RenderGraph renderGraph, HDCamera hdCamera, TextureHa
filterParams.occluderMotionRejection = false;
filterParams.receiverMotionRejection = true;
filterParams.exposureControl = false;
+ filterParams.fullResolution = true;
return GetTemporalFilter().Denoise(renderGraph, hdCamera, filterParams,
rayTracedSSS, renderGraph.defaultResources.blackTextureXR, historyBuffer,
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDTemporalFilter.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDTemporalFilter.cs
index c1396fb813e..301d8a282dd 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDTemporalFilter.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDTemporalFilter.cs
@@ -69,6 +69,7 @@ internal struct TemporalFilterParameters
public bool occluderMotionRejection;
public bool receiverMotionRejection;
public bool exposureControl;
+ public bool fullResolution;
}
class HistoryValidityPassData
@@ -188,6 +189,7 @@ class TemporalFilterPassData
public bool occluderMotionRejection;
public bool receiverMotionRejection;
public int exposureControl;
+ public bool fullResolution;
// Kernels
public int temporalAccKernel;
@@ -218,16 +220,25 @@ internal TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, Tempo
builder.EnableAsyncCompute(false);
// Camera parameters
- passData.texWidth = hdCamera.actualWidth;
- passData.texHeight = hdCamera.actualHeight;
+ if (filterParams.fullResolution)
+ {
+ passData.texWidth = hdCamera.actualWidth;
+ passData.texHeight = hdCamera.actualHeight;
+ }
+ else
+ {
+ passData.texWidth = hdCamera.actualWidth / 2;
+ passData.texHeight = hdCamera.actualHeight / 2;
+ }
passData.viewCount = hdCamera.viewCount;
// Denoising parameters
- passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, hdCamera.actualWidth, hdCamera.actualHeight);
+ passData.pixelSpreadTangent = HDRenderPipeline.GetPixelSpreadTangent(hdCamera.camera.fieldOfView, passData.texWidth, passData.texHeight);
passData.historyValidity = filterParams.historyValidity;
passData.receiverMotionRejection = filterParams.receiverMotionRejection;
passData.occluderMotionRejection = filterParams.occluderMotionRejection;
passData.exposureControl = filterParams.exposureControl ? 1 : 0;
+ passData.fullResolution = filterParams.fullResolution;
// Kernels
passData.temporalAccKernel = filterParams.singleChannel ? m_TemporalAccumulationSingleKernel : m_TemporalAccumulationColorKernel;
@@ -279,12 +290,14 @@ internal TextureHandle Denoise(RenderGraph renderGraph, HDCamera hdCamera, Tempo
ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.temporalAccKernel, HDShaderIDs._AccumulationOutputTextureRW, data.outputBuffer);
// Combine signal with history
+ CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_FILTER", data.fullResolution);
ctx.cmd.DispatchCompute(data.temporalFilterCS, data.temporalAccKernel, numTilesX, numTilesY, data.viewCount);
// Make sure to copy the new-accumulated signal in our history buffer
ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseInputTexture, data.outputBuffer);
ctx.cmd.SetComputeTextureParam(data.temporalFilterCS, data.copyHistoryKernel, HDShaderIDs._DenoiseOutputTextureRW, data.historyBuffer);
ctx.cmd.DispatchCompute(data.temporalFilterCS, data.copyHistoryKernel, numTilesX, numTilesY, data.viewCount);
+ CoreUtils.SetKeyword(ctx.cmd, "FULL_RESOLUTION_FILTER", true);
});
return passData.outputBuffer;
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/DiffuseDenoiser.compute b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/DiffuseDenoiser.compute
index 1e915d7d5bd..2ec49f2a023 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/DiffuseDenoiser.compute
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/DiffuseDenoiser.compute
@@ -4,6 +4,8 @@
#pragma kernel GatherSingle GATHER_FILTER=GatherSingle SINGLE_CHANNEL
#pragma kernel GatherColor GATHER_FILTER=GatherColor
+#pragma multi_compile _ FULL_RESOLUTION_INPUT
+
// Common includes
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/CommonLighting.hlsl"
@@ -63,18 +65,23 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);
// Fetch the current pixel coordinate
- uint2 centerCoord = groupId * DIFFUSE_DENOISER_TILE_SIZE + groupThreadId;
+ uint2 currentCoord = groupId * DIFFUSE_DENOISER_TILE_SIZE + groupThreadId;
+ #if FULL_RESOLUTION_INPUT
+ uint2 sourceCoord = currentCoord;
+ #else
+ uint2 sourceCoord = currentCoord * 2;
+ #endif
// Read the central position
- const BilateralData center = TapBilateralData(centerCoord);
+ const BilateralData center = TapBilateralData(sourceCoord);
// If this is a background pixel, we are done
if (center.z01 == 1.0)
{
#if SINGLE_CHANNEL
- _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = 0.0;
+ _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = 0.0;
#else
- _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = float4(0.0, 0.0, 0.0, 1.0);
+ _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, 1.0);
#endif
}
@@ -97,7 +104,7 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
const float sigma = 0.9 * denoisingRadius;
// Index of the pixel in the 2x2 group that are used for the half res filter
- int localIndex = (centerCoord.x & 1) + (centerCoord.y & 1) * 2;
+ int localIndex = (currentCoord.x & 1) + (currentCoord.y & 1) * 2;
// Define the sample count for this pixel. 16 samples per pixels if it is a full res or 4 if half resolution
const int numSamples = _HalfResolutionFilter ? 4 : 16;
@@ -136,7 +143,15 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
#endif
// Tap the data for this pixel
+ #if FULL_RESOLUTION_INPUT
uint2 tapCoord = nDC * _ScreenSize.xy;
+ #else
+ // Not all pixels can be fetched (only the 2x2 representative)
+ uint2 halfResTapCoord = (nDC * _ScreenSize.xy) / 2;
+ uint2 tapCoord = halfResTapCoord * 2;
+ #endif
+
+ // Fetch the corresponding data
const BilateralData tapData = TapBilateralData(tapCoord);
// If the tapped pixel is a background pixel or too far from the center pixel
@@ -150,10 +165,18 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
const float w = r > 0.001f ? gaussian(r, sigma) * ComputeBilateralWeight(center, tapData) : 1.0;
// Accumulate the new sample
- #if SINGLE_CHANNEL
- colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, tapCoord).x * w;
+ #if FULL_RESOLUTION_INPUT
+ #if SINGLE_CHANNEL
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, tapCoord).x * w;
+ #else
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, tapCoord).xyz * w;
+ #endif
#else
- colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, tapCoord).xyz * w;
+ #if SINGLE_CHANNEL
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, halfResTapCoord).x * w;
+ #else
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, halfResTapCoord).xyz * w;
+ #endif
#endif
wSum += w;
}
@@ -162,18 +185,18 @@ void BILATERAL_FILTER(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupT
if (wSum == 0.0)
{
#if SINGLE_CHANNEL
- colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).x;
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).x;
#else
- colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).xyz;
+ colorSum += LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).xyz;
#endif
wSum += 1.0;
}
// Normalize the result
#if SINGLE_CHANNEL
- _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = colorSum / wSum;
+ _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = colorSum / wSum;
#else
- _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = float4(colorSum / wSum, 1.0);
+ _DenoiseOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = float4(colorSum / wSum, 1.0);
#endif
}
@@ -192,7 +215,11 @@ void FillGatherDataLDS(uint groupIndex, uint2 pixelCoord)
float3 lighting = LOAD_TEXTURE2D_X(_DenoiseInputTexture, sampleCoord).xyz;
gs_cacheLighting[groupIndex] = PackToR11G11B10f(lighting);
#endif
+ #if FULL_RESOLUTION_INPUT
float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, sampleCoord).x;
+ #else
+ float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, sampleCoord * 2).x;
+ #endif
gs_cacheDepth[groupIndex] = depthValue;
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/TemporalFilter.compute b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/TemporalFilter.compute
index 4e0d3bd4060..1ccbada9dd6 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/TemporalFilter.compute
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Denoising/TemporalFilter.compute
@@ -13,6 +13,8 @@
#pragma kernel OutputHistoryArray OUTPUT_IS_ARRAY
+#pragma multi_compile _ FULL_RESOLUTION_FILTER
+
// Common includes
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/CommonLighting.hlsl"
@@ -163,43 +165,53 @@ void TEMPORAL_ACCUMULATION(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 g
UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);
// Fetch the current pixel coordinate
- uint2 centerCoord = groupId * TEMPORAL_FILTER_TILE_SIZE + groupThreadId;
+ uint2 currentCoord = groupId * TEMPORAL_FILTER_TILE_SIZE + groupThreadId;
+ #ifdef FULL_RESOLUTION_FILTER
+ uint2 sourceCoord = currentCoord;
+ #else
+ uint2 sourceCoord = currentCoord * 2;
+ #endif
// If the depth of this pixel is the depth of the background, we can end the process right away
- float depth = LOAD_TEXTURE2D_X(_DepthTexture, centerCoord).r;
+ float depth = LOAD_TEXTURE2D_X(_DepthTexture, sourceCoord).r;
if (depth == UNITY_RAW_FAR_CLIP_VALUE)
{
- _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = float4(0.0, 0.0, 0.0, 0);
+ _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, 0);
return;
}
// Fetch the position of the current pixel
- PositionInputs posInputs = GetPositionInput(centerCoord, _ScreenSize.zw, depth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix());
+ PositionInputs posInputs = GetPositionInput(sourceCoord, _ScreenSize.zw, depth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix());
// Compute the velocity information for this pixel
float2 velocity = float2(0.0, 0.0);
- DecodeMotionVector(LOAD_TEXTURE2D_X(_CameraMotionVectorsTexture, (float2)centerCoord), velocity);
+ DecodeMotionVector(LOAD_TEXTURE2D_X(_CameraMotionVectorsTexture, (float2)sourceCoord), velocity);
+ #ifdef FULL_RESOLUTION_FILTER
float2 historyTapCoord = (float2)((posInputs.positionNDC - velocity) * _ScreenSize.xy);
+ #else
+ // Remap the history tap coord into half res space and compensate for the half resolution half pixel shift
+ float2 historyTapCoord = (float2)((posInputs.positionNDC - velocity) * _ScreenSize.xy * 0.5) + 0.25;
+ #endif
// Fetch the current value, history value and current sample count
#if HISTORY_IS_ARRAY
#if SINGLE_CHANNEL
- float color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).x;
+ float color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).x;
float history = dot(_DenoisingHistoryMask, SAMPLE_TEXTURE2D_ARRAY_LOD(_HistoryBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, INDEX_TEXTURE2D_ARRAY_X(_DenoisingHistorySlice), 0));
float sampleCount = dot(_DenoisingHistoryMask, SAMPLE_TEXTURE2D_ARRAY_LOD(_HistoryValidityBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, INDEX_TEXTURE2D_ARRAY_X(_DenoisingHistorySlice), 0));
#else
- float3 color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).xyz;
+ float3 color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).xyz;
// In case we are processing a color texture, it is always stored in the first three channels
float3 history = SAMPLE_TEXTURE2D_ARRAY_LOD(_HistoryBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, INDEX_TEXTURE2D_ARRAY_X(_DenoisingHistorySlice), 0).xyz;
float sampleCount = SAMPLE_TEXTURE2D_ARRAY_LOD(_HistoryValidityBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, INDEX_TEXTURE2D_ARRAY_X(_DenoisingHistorySlice), 0).x;
#endif
#else
#if SINGLE_CHANNEL
- float color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).x;
+ float color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).x;
float2 history = SAMPLE_TEXTURE2D_X_LOD(_HistoryBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, 0).xy;
float sampleCount = history.y;
#else
- float3 color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, centerCoord).xyz;
+ float3 color = LOAD_TEXTURE2D_X(_DenoiseInputTexture, currentCoord).xyz;
float4 history = SAMPLE_TEXTURE2D_X_LOD(_HistoryBuffer, s_linear_clamp_sampler, historyTapCoord * _ScreenSize.zw * _RTHandleScaleHistory.zw, 0);
history.xyz = max(history.xyz, 0);
float sampleCount = history.w;
@@ -225,13 +237,13 @@ void TEMPORAL_ACCUMULATION(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 g
#endif
// Get the velocity of the current sample
- float movingIntersection = _OccluderMotionRejection ? LOAD_TEXTURE2D_X(_VelocityBuffer, centerCoord).r > MINIMAL_MOTION_DISTANCE : 0.0f;
+ float movingIntersection = _OccluderMotionRejection ? LOAD_TEXTURE2D_X(_VelocityBuffer, currentCoord).r > MINIMAL_MOTION_DISTANCE : 0.0f;
// Accumulation factor that tells us how much we need to keep the history data
float accumulationFactor = 0.0;
// Evaluate our validation mask
- float validationMask = (_ReceiverMotionRejection ? (LOAD_TEXTURE2D_X(_ValidationBuffer, centerCoord).x) : (LOAD_TEXTURE2D_X(_ValidationBuffer, centerCoord).x & (~HISTORYREJECTIONFLAGS_MOTION))) != 0 ? 0.0f : 1.0f;
+ float validationMask = (_ReceiverMotionRejection ? (LOAD_TEXTURE2D_X(_ValidationBuffer, sourceCoord).x) : (LOAD_TEXTURE2D_X(_ValidationBuffer, sourceCoord).x & (~HISTORYREJECTIONFLAGS_MOTION))) != 0 ? 0.0f : 1.0f;
// Combine the validation mask with the history validity
bool historyInvalid = ((float)validationMask * _HistoryValidity) < 1.0f;
@@ -255,9 +267,9 @@ void TEMPORAL_ACCUMULATION(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 g
// Store our accumulated value
#if SINGLE_CHANNEL
- _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = float4(color * (1.0 - accumulationFactor) + history.x * accumulationFactor, sampleCount, 0.0, 1.0);
+ _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = float4(color * (1.0 - accumulationFactor) + history.x * accumulationFactor, sampleCount, 0.0, 1.0);
#else
- _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(centerCoord)] = float4(color * (1.0 - accumulationFactor) + history.xyz * accumulationFactor, sampleCount);
+ _AccumulationOutputTextureRW[COORD_TEXTURE2D_X(currentCoord)] = float4(color * (1.0 - accumulationFactor) + history.xyz * accumulationFactor, sampleCount);
#endif
}
@@ -268,7 +280,11 @@ void CopyHistory(uint3 dispatchThreadId : SV_DispatchThreadID)
{
UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);
+ #ifdef FULL_RESOLUTION_FILTER
if (any(dispatchThreadId.xy > uint2(_ScreenSize.xy)))
+ #else
+ if (any(dispatchThreadId.xy > uint2(_ScreenSize.xy / 2)))
+ #endif
return; // Out of bounds, discard
_DenoiseOutputTextureRW[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = LOAD_TEXTURE2D_X(_DenoiseInputTexture, dispatchThreadId.xy);
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingLightLoop.hlsl b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingLightLoop.hlsl
index 95837be1e9a..79af86856d8 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingLightLoop.hlsl
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingLightLoop.hlsl
@@ -84,7 +84,7 @@ void LightLoop( float3 V, PositionInputs posInput, PreLightData preLightData, BS
// Add the traced reflection (if any)
if (reflection.w == 1.0)
{
- IndirectLighting lighting = EvaluateBSDF_RaytracedReflection(context, bsdfData, preLightData, reflection);
+ IndirectLighting lighting = EvaluateBSDF_RaytracedReflection(context, bsdfData, preLightData, reflection.xyz);
AccumulateIndirectLighting(lighting, aggregateLighting);
reflectionHierarchyWeight = 1.0;
}
@@ -95,7 +95,7 @@ void LightLoop( float3 V, PositionInputs posInput, PreLightData preLightData, BS
{
IndirectLighting indirect;
ZERO_INITIALIZE(IndirectLighting, indirect);
- IndirectLighting lighting = EvaluateBSDF_RaytracedRefraction(context, preLightData, transmission);
+ IndirectLighting lighting = EvaluateBSDF_RaytracedRefraction(context, preLightData, transmission.xyz);
AccumulateIndirectLighting(lighting, aggregateLighting);
refractionHierarchyWeight = 1.0;
}