diff --git a/com.unity.render-pipelines.core/Runtime/Textures/RTHandle.cs b/com.unity.render-pipelines.core/Runtime/Textures/RTHandle.cs
index e06218a8585..d82f4a9a175 100644
--- a/com.unity.render-pipelines.core/Runtime/Textures/RTHandle.cs
+++ b/com.unity.render-pipelines.core/Runtime/Textures/RTHandle.cs
@@ -155,6 +155,9 @@ public void Release()
/// Input size scaled by the RTHandle scale factor.
public Vector2Int GetScaledSize(Vector2Int refSize)
{
+ if (!useScaling)
+ return refSize;
+
if (scaleFunc != null)
{
return scaleFunc(refSize);
diff --git a/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl b/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl
index 368095cdaa1..95a53c3e993 100644
--- a/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl
+++ b/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl
@@ -21,7 +21,8 @@ real3 ComputeEdgeFactor(real3 V1, real3 V2)
if (V1oV2 < 0)
{
// Undo range reduction.
- y = PI * rsqrt(saturate(1 - V1oV2 * V1oV2)) - y;
+ const float epsilon = 1e-5f;
+ y = PI * rsqrt(max(epsilon, saturate(1 - V1oV2 * V1oV2))) - y;
}
return V1xV2 * y;
diff --git a/com.unity.render-pipelines.high-definition/CHANGELOG.md b/com.unity.render-pipelines.high-definition/CHANGELOG.md
index 8449d87011f..c4c2bd009e2 100644
--- a/com.unity.render-pipelines.high-definition/CHANGELOG.md
+++ b/com.unity.render-pipelines.high-definition/CHANGELOG.md
@@ -153,6 +153,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Fixed broken Lanczos filter artifacts on ps4, caused by a very aggressive epsilon (case 1328904)
- Fixed global Settings ignore the path set via Fix All in HDRP wizard (case 1327978)
- Fixed issue with an assert getting triggered with OnDemand shadows.
+- Fixed GBuffer clear option in FrameSettings not working.
+- Fixed usage of Panini Projection with floating point HDRP and Post Processing color buffers.
+- Fixed NaN generation in the Area light code.
+- Fixed CustomPassUtils scaling issues when used with RTHandles allocated from a RenderTexture.
### Changed
- Changed Window/Render Pipeline/HD Render Pipeline Wizard to Window/Rendering/HDRP Wizard
@@ -222,6 +226,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Enforced more consistent shading normal computation for path tracing, so that impossible shading/geometric normal combinations are avoided (case 1323455).
- Default black texture XR is now opaque (alpha = 1).
- Changed ray tracing acceleration structure build, so that only meshes with HDRP materials are included (case 1322365).
+- Changed default sidedness to double, when a mesh with a mix of single and double-sided materials is added to the ray tracing acceleration structure (case 1323451).
## [11.0.0] - 2020-10-21
diff --git a/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md b/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
index 7249b0ab5e3..b58ef3c301d 100644
--- a/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
+++ b/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Getting-Started.md
@@ -1,8 +1,6 @@
# Getting started with ray tracing
-The High Definition Render Pipeline (HDRP) includes preview ray tracing support from Unity 2019.3. Ray tracing is a feature that allows you to access data that is not on screen. For example, you can use it to request position data, normal data, or lighting data, and then use this data to compute quantities that are hard to approximate using classic rasterization techniques.
-
-While film production uses ray tracing extensively, its resource intensity has limited its use to offline rendering for a long time. Now, with recent advances in GPU hardware, you can make use of ray tracing effect in real time.
+The High Definition Render Pipeline (HDRP) includes preview ray tracing support from Unity 2019.3. Ray tracing allows you to access data that is not on screen. For example, you can use it to request position data, normal data, or lighting data, and then use this data to compute quantities that are hard to approximate using classic rasterization techniques.
This document covers:
@@ -161,7 +159,7 @@ Now that your HDRP Project supports ray tracing, there are a few steps you must
#### Frame Settings
-To make HDRP calculates ray tracing effects for [Cameras](HDRP-Camera.md) in your Scene, make sure your Cameras use [Frame Settings](Frame-Settings.md) that have ray tracing enabled. You can enable ray tracing for all Cameras by default, or you can enable ray tracing for specific Cameras in your Scene.
+To make HDRP calculate ray tracing effects for [Cameras](HDRP-Camera.md) in your Scene, make sure your Cameras use [Frame Settings](Frame-Settings.md) that have ray tracing enabled. You can enable ray tracing for all Cameras by default, or you can enable ray tracing for specific Cameras in your Scene.
To enable ray tracing by default:
@@ -188,6 +186,20 @@ To check whether it is possible to use ray tracing in a Scene, HDRP includes a m
1. Click **Edit > Rendering > Check Scene Content for HDRP Ray Tracing**.
2. In the Console window (menu: **Window > General > Console**), check if there are any warnings.
+
+
+## Ray tracing and Meshes
+
+HDRP changes how it handles Meshes in your scene when you integrate a ray traced effect into your project.
+
+When you enable ray tracing, HDRP automatically creates a ray tracing acceleration structure. This structure allows Unity to calculate ray tracing for Meshes in your scene efficiently in real time.
+
+As a result, ray tracing can change how some Meshes appear in your scene in the following ways:
+
+- If your Mesh has a Material assigned that does not have the HDRenderPipeline tag, HDRP does not add it to the acceleration structure and does not apply any ray traced effects to the Mesh as a result.
+- If your Mesh has a Decal Material assigned, HDRP does not add it to the acceleration structure and the Mesh does not appear in your scene.
+- If a Mesh has a combination of Materials that are single and double-sided, HDRP flags all Materials you have assigned to this Mesh as double-sided.
+
## Ray tracing effects overview
@@ -209,7 +221,7 @@ HDRP includes two ray tracing modes that define how it evaluates certain ray-tra
* **Performance**: This mode targets real-time applications. If you select this mode, ray-traced effects include presets that you can change to balance performance with quality.
* **Quality**: This mode targets technical demos and applications that want the best quality results.
-Depending on which ray tracing mode you select, HDRP may expose difference properties for some ray-traced effects.
+Depending on which ray tracing mode you select, HDRP may expose different properties for some ray-traced effects.
You can change which ray tracing mode HDRP uses on either a Project level or effect level. To change it for your entire Project:
@@ -224,7 +236,7 @@ If you select **Both**, you can change the ray tracing mode for each ray-traced
## Ray tracing project
-You can find a small ray tracing project that contains all the effects mention above here:
+You can find a small ray tracing project that contains all the effects mentioned above here:
https://github.com/Unity-Technologies/SmallOfficeRayTracing
This Project is already set up with ray tracing support.
@@ -238,7 +250,7 @@ There is no support for ray tracing on platforms other than DX12 for now.
HDRP ray tracing in Unity 2020.2 has the following limitations:
- Does not support vertex animation.
-- Does not supports decals.
+- Does not support decals.
- Does not support tessellation.
- Does not support per pixel displacement (parallax occlusion mapping, height map, depth offset).
- Does not support VFX and Terrain.
@@ -247,6 +259,7 @@ HDRP ray tracing in Unity 2020.2 has the following limitations:
- For renderers that have [LODs](https://docs.unity3d.com/2019.3/Documentation/Manual/LevelOfDetail.html), the ray tracing acceleration structure only includes the highest level LOD and ignores the lower LODs.
- Does not support [Graphics.DrawMesh](https://docs.unity3d.com/ScriptReference/Graphics.DrawMesh.html).
- Ray tracing is not supported when rendering [Reflection Probes](Reflection-Probe.md).
+- HDRP does not support [orthographic projection](HDRP-Camera.md). If you enable orthographic projection mode, you might experience rendering problems for Transparent Materials, volumetrics and planar reflections.
### Unsupported shader graph nodes for ray tracing
diff --git a/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Path-Tracing.md b/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Path-Tracing.md
index 1c2588aeef4..af422d47790 100644
--- a/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Path-Tracing.md
+++ b/com.unity.render-pipelines.high-definition/Documentation~/Ray-Tracing-Path-Tracing.md
@@ -1,6 +1,6 @@
# Path tracing
-Path tracing is a ray tracing algorithm that sends rays from the Camera and, when a ray hits a reflective or refractive surface, recurses the process until it reaches a light source. The series of rays from the Camera to the Light forms a "path".
+Path tracing is a ray tracing algorithm that sends rays from the Camera and, when a ray hits a reflective or refractive surface, recurses the process until it reaches a light source. The series of rays from the Camera to the Light forms a "path".
It enables HDRP to compute many different effects (such as hard or soft shadows, mirror or glossy reflections and refractions, and indirect illumination) in one single unified process.
@@ -14,7 +14,7 @@ Noisy image with **Maximum Samples** set to 1
Clean image with **Maximum Samples** set to 256
-The current implementation for path tracing in the High Definition Render Pipeline (HDRP) accumulates paths for every pixel up to a maximum count, unless the Camera moves. If the Camera moves, HDRP restarts the path accumulation. Path tracing supports Lit, LayeredLit and Unlit materials, and area, point, directional and environment lights.
+The current implementation for path tracing in the High Definition Render Pipeline (HDRP) accumulates paths for every pixel up to a maximum count unless the Camera moves. If the Camera moves, HDRP restarts the path accumulation. Path tracing supports Lit, LayeredLit, and Unlit materials, and area, point, directional, and environment lights.
## Setting up path tracing
@@ -62,11 +62,12 @@ There is no support for path tracing on platforms other than DX12 for now.
HDRP path tracing in Unity 2020.2 has the following limitations:
+- If a Mesh in your scene has a Material assigned that does not have the `HDRenderPipeline` tag, the mesh will not appear in your scene. For more information, see [Ray tracing and Meshes](Ray-Tracing-Getting-Started.md#ray-tracing-and-meshes).
- Does not support 3D Text and TextMeshPro.
-- Does not support Shader Graph nodes that use derivatives (ex : normal from textures).
+- Does not support Shader Graph nodes that use derivatives (for example, computing a normal from a texture).
- Does not support decals.
- Does not support tessellation.
-- Does not support Tube and Disc shaped Area Light.
+- Does not support Tube and Disc-shaped Area Lights.
- Does not support Translucent Opaque Materials.
- Does not support several of HDRP's Materials. This includes Eye, StackLit, Hair, Decal.
- Does not support per-pixel displacement (parallax occlusion mapping, height map, depth offset).
diff --git a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs
index f910cc97218..7a27ce64cee 100644
--- a/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs
+++ b/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs
@@ -14,8 +14,8 @@ class Styles
public static readonly GUIContent dithering = EditorGUIUtility.TrTextContent("Dithering", "Should we apply 8-bit dithering to the final render?");
public static readonly GUIContent stopNaNs = EditorGUIUtility.TrTextContent("Stop NaNs", "Automatically replaces NaN/Inf in shaders by a black pixel to avoid breaking some effects. This will slightly affect performances and should only be used if you experience NaN issues that you can't fix.");
- public static readonly GUIContent cullingMask = EditorGUIUtility.TrTextContent("Culling Mask");
- public static readonly GUIContent occlusionCulling = EditorGUIUtility.TrTextContent("Occlusion Culling");
+ public static readonly GUIContent cullingMask = EditorGUIUtility.TrTextContent("Culling Mask", "Specifies the list of layers the camera should render.");
+ public static readonly GUIContent occlusionCulling = EditorGUIUtility.TrTextContent("Occlusion Culling", "When enabled, the camera does not render objects that are being obscured by other geometry.");
public static readonly GUIContent SMAAQualityPresetContent = EditorGUIUtility.TrTextContent("Quality Preset", "The quality preset for SMAA, low has the best performance but worst quality, High has the highest quality but worst performance.");
diff --git a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PaniniProjection.compute b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PaniniProjection.compute
index e1f03a8a5c8..bf637fd8e1f 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PaniniProjection.compute
+++ b/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PaniniProjection.compute
@@ -1,15 +1,17 @@
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl"
+#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PostProcessDefines.hlsl"
#pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch
#pragma kernel KMain
#pragma multi_compile GENERIC UNITDISTANCE
+#pragma multi_compile _ ENABLE_ALPHA
TEXTURE2D_X(_InputTexture);
-RW_TEXTURE2D_X(float3, _OutputTexture);
+RW_TEXTURE2D_X(CTYPE, _OutputTexture);
SAMPLER(sampler_LinearClamp);
@@ -125,7 +127,7 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID)
}
else
{
- float3 smp = SAMPLE_TEXTURE2D_X_LOD(_InputTexture, sampler_LinearClamp, ClampAndScaleUVForBilinear(coords), 0.0).xyz;
+ CTYPE smp = SAMPLE_TEXTURE2D_X_LOD(_InputTexture, sampler_LinearClamp, ClampAndScaleUVForBilinear(coords), 0.0).CTYPE_SWIZZLE;
_OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = smp;
}
}
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs
index 9e68add632c..8a364795a1a 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs
@@ -3019,6 +3019,9 @@ TextureHandle PaniniProjectionPass(RenderGraph renderGraph, HDCamera hdCamera, T
else
passData.paniniProjectionCS.EnableKeyword("UNITDISTANCE");
+ if (m_EnableAlpha)
+ passData.paniniProjectionCS.EnableKeyword("ENABLE_ALPHA");
+
passData.paniniParams = new Vector4(viewExtents.x, viewExtents.y, paniniD, paniniS);
passData.paniniProjectionKernel = passData.paniniProjectionCS.FindKernel("KMain");
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs
index 07a040ff2a6..44c5320dbf9 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs
@@ -56,10 +56,9 @@ void CleanupPrepass()
CoreUtils.Destroy(m_DownsampleDepthMaterial);
}
- bool NeedClearGBuffer()
+ bool NeedClearGBuffer(HDCamera hdCamera)
{
- // TODO: Add an option to force clear
- return m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled();
+ return m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || hdCamera.frameSettings.IsEnabled(FrameSettingsField.ClearGBuffers);
}
HDUtils.PackedMipChainInfo GetDepthBufferMipChainInfo()
@@ -119,7 +118,7 @@ TextureHandle CreateDepthBuffer(RenderGraph renderGraph, bool clear, MSAASamples
return renderGraph.CreateTexture(depthDesc);
}
- TextureHandle CreateNormalBuffer(RenderGraph renderGraph, MSAASamples msaaSamples)
+ TextureHandle CreateNormalBuffer(RenderGraph renderGraph, HDCamera hdCamera, MSAASamples msaaSamples)
{
bool msaa = msaaSamples != MSAASamples.None;
#if UNITY_2020_2_OR_NEWER
@@ -131,7 +130,7 @@ TextureHandle CreateNormalBuffer(RenderGraph renderGraph, MSAASamples msaaSample
TextureDesc normalDesc = new TextureDesc(Vector2.one, true, true)
{
- colorFormat = GraphicsFormat.R8G8B8A8_UNorm, clearBuffer = NeedClearGBuffer(), clearColor = Color.black, bindTextureMS = msaa, msaaSamples = msaaSamples, enableRandomWrite = !msaa, name = msaa ? "NormalBufferMSAA" : "NormalBuffer"
+ colorFormat = GraphicsFormat.R8G8B8A8_UNorm, clearBuffer = NeedClearGBuffer(hdCamera), clearColor = Color.black, bindTextureMS = msaa, msaaSamples = msaaSamples, enableRandomWrite = !msaa, name = msaa ? "NormalBufferMSAA" : "NormalBuffer"
#if UNITY_2020_2_OR_NEWER
, fastMemoryDesc = fastMemDesc
#endif
@@ -406,7 +405,8 @@ bool RenderDepthPrepass(RenderGraph renderGraph, CullingResults cull, HDCamera h
{
decalBuffer = renderGraph.defaultResources.blackTextureXR;
output.depthAsColor = CreateDepthAsColorBuffer(renderGraph, hdCamera.msaaSamples);
- output.normalBuffer = CreateNormalBuffer(renderGraph, hdCamera.msaaSamples);
+ output.normalBuffer = CreateNormalBuffer(renderGraph, hdCamera, hdCamera.msaaSamples);
+
return false;
}
@@ -464,7 +464,8 @@ bool RenderDepthPrepass(RenderGraph renderGraph, CullingResults cull, HDCamera h
int mrtIndex = 0;
if (msaa)
output.depthAsColor = builder.UseColorBuffer(CreateDepthAsColorBuffer(renderGraph, hdCamera.msaaSamples), mrtIndex++);
- output.normalBuffer = builder.UseColorBuffer(CreateNormalBuffer(renderGraph, hdCamera.msaaSamples), mrtIndex++);
+ output.normalBuffer = builder.UseColorBuffer(CreateNormalBuffer(renderGraph, hdCamera, hdCamera.msaaSamples), mrtIndex++);
+
if (decalLayersEnabled)
decalBuffer = builder.UseColorBuffer(decalBuffer, mrtIndex++);
@@ -568,7 +569,7 @@ struct GBufferOutput
void SetupGBufferTargets(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle sssBuffer, TextureHandle vtFeedbackBuffer, ref PrepassOutput prepassOutput, FrameSettings frameSettings, RenderGraphBuilder builder)
{
- bool clearGBuffer = NeedClearGBuffer();
+ bool clearGBuffer = NeedClearGBuffer(hdCamera);
bool lightLayers = frameSettings.IsEnabled(FrameSettingsField.LightLayers);
bool shadowMasks = frameSettings.IsEnabled(FrameSettingsField.Shadowmask);
@@ -705,7 +706,8 @@ void ResolvePrepassBuffers(RenderGraph renderGraph, HDCamera hdCamera, ref Prepa
output.resolvedDepthBuffer = builder.UseDepthBuffer(CreateDepthBuffer(renderGraph, true, MSAASamples.None), DepthAccess.Write);
output.depthValuesMSAA = builder.UseColorBuffer(depthValuesBuffer, 0);
- output.resolvedNormalBuffer = builder.UseColorBuffer(CreateNormalBuffer(renderGraph, MSAASamples.None), 1);
+ output.resolvedNormalBuffer = builder.UseColorBuffer(CreateNormalBuffer(renderGraph, hdCamera, MSAASamples.None), 1);
+
if (passData.needMotionVectors)
output.resolvedMotionVectorsBuffer = builder.UseColorBuffer(CreateMotionVectorBuffer(renderGraph, false, MSAASamples.None), 2);
else
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
index 381cf5a670b..7bb176c9cbb 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs
@@ -76,7 +76,7 @@ void ExecuteWithRenderGraph(RenderRequest renderRequest,
LightingBuffers lightingBuffers = new LightingBuffers();
lightingBuffers.diffuseLightingBuffer = CreateDiffuseLightingBuffer(m_RenderGraph, hdCamera.msaaSamples);
- lightingBuffers.sssBuffer = CreateSSSBuffer(m_RenderGraph, hdCamera.msaaSamples);
+ lightingBuffers.sssBuffer = CreateSSSBuffer(m_RenderGraph, hdCamera, hdCamera.msaaSamples);
var prepassOutput = RenderPrepass(m_RenderGraph, colorBuffer, lightingBuffers.sssBuffer, vtFeedbackBuffer, cullingResults, customPassCullingResults, hdCamera, aovRequest, aovBuffers);
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.SubsurfaceScattering.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.SubsurfaceScattering.cs
index 46ea6f782b1..7a0e216c628 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.SubsurfaceScattering.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.SubsurfaceScattering.cs
@@ -162,7 +162,7 @@ static bool NeedTemporarySubsurfaceBuffer()
// Albedo + SSS Profile and mask / Specular occlusion (when no SSS)
// This will be used during GBuffer and/or forward passes.
- TextureHandle CreateSSSBuffer(RenderGraph renderGraph, MSAASamples msaaSamples)
+ TextureHandle CreateSSSBuffer(RenderGraph renderGraph, HDCamera hdCamera, MSAASamples msaaSamples)
{
bool msaa = msaaSamples != MSAASamples.None;
#if UNITY_2020_2_OR_NEWER
@@ -178,7 +178,7 @@ TextureHandle CreateSSSBuffer(RenderGraph renderGraph, MSAASamples msaaSamples)
enableRandomWrite = !msaa,
bindTextureMS = msaa,
msaaSamples = msaaSamples,
- clearBuffer = NeedClearGBuffer(),
+ clearBuffer = NeedClearGBuffer(hdCamera),
clearColor = Color.clear,
name = msaa ? "SSSBufferMSAA" : "SSSBuffer"
#if UNITY_2020_2_OR_NEWER
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs
index bb3ff2fdcd5..1a32a5ae97e 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/HDRaytracingManager.cs
@@ -186,7 +186,7 @@ AccelerationStructureStatus AddInstanceToRAS(Renderer currentRenderer,
// We need to build the instance flag for this renderer
uint instanceFlag = 0x00;
- bool singleSided = false;
+ bool doubleSided = false;
bool materialIsOnlyTransparent = true;
bool hasTransparentSubMaterial = false;
@@ -228,9 +228,9 @@ AccelerationStructureStatus AddInstanceToRAS(Renderer currentRenderer,
else if (transparentMaterial)
subMeshFlagArray[meshIdx] |= RayTracingSubMeshFlags.UniqueAnyHitCalls;
- // Force it to be non single sided if it has the keyword if there is a reason
- bool doubleSided = currentMaterial.doubleSidedGI || currentMaterial.IsKeywordEnabled("_DOUBLESIDED_ON");
- singleSided |= !doubleSided;
+ // Check if we want to enable double-sidedness for the mesh
+ // (note that a mix of single and double-sided materials will result in a double-sided mesh in the AS)
+ doubleSided |= currentMaterial.doubleSidedGI || currentMaterial.IsKeywordEnabled("_DOUBLESIDED_ON");
// Check if the material has changed since last time we were here
if (!m_MaterialsDirty)
@@ -250,12 +250,9 @@ AccelerationStructureStatus AddInstanceToRAS(Renderer currentRenderer,
}
}
- // If the mesh was not valid, exclude it
+ // If the mesh was not valid, exclude it (without affecting sidedness)
if (!validMesh)
- {
subMeshFlagArray[meshIdx] = RayTracingSubMeshFlags.Disabled;
- singleSided = true;
- }
}
// If the material is considered opaque, every sub-mesh has to be enabled and with unique any hit calls
@@ -322,7 +319,7 @@ AccelerationStructureStatus AddInstanceToRAS(Renderer currentRenderer,
if (instanceFlag == 0) return AccelerationStructureStatus.Added;
// Add it to the acceleration structure
- m_CurrentRAS.AddInstance(currentRenderer, subMeshFlags: subMeshFlagArray, enableTriangleCulling: singleSided, mask: instanceFlag);
+ m_CurrentRAS.AddInstance(currentRenderer, subMeshFlags: subMeshFlagArray, enableTriangleCulling: !doubleSided, mask: instanceFlag);
// Indicates that a transform has changed in our scene (mesh or light)
m_TransformDirty |= currentRenderer.transform.hasChanged;
diff --git a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassUtils.cs b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassUtils.cs
index cb1ae277836..6ca6e8ad3f5 100644
--- a/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassUtils.cs
+++ b/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/CustomPass/CustomPassUtils.cs
@@ -569,7 +569,11 @@ internal static void SetRenderTargetWithScaleBias(in CustomPassContext ctx, Mate
{
// viewport with RT handle scale and scale factor:
Rect viewport = new Rect();
- Vector2 destSize = viewport.size = destination.GetScaledSize(destination.rtHandleProperties.currentViewportSize);
+ if (destination.useScaling)
+ viewport.size = destination.GetScaledSize(destination.rtHandleProperties.currentViewportSize);
+ else
+ viewport.size = new Vector2Int(destination.rt.width, destination.rt.height);
+ Vector2 destSize = viewport.size;
viewport.position = new Vector2(viewport.size.x * destScaleBias.z, viewport.size.y * destScaleBias.w);
viewport.size *= new Vector2(destScaleBias.x, destScaleBias.y);