Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
[SRP] Graphics SRP Devs 0010
Fix yamato failures related to the water system and the foam mask texture. (#19445)
Add RT shadows for Pyramid and Box shaped Spot Lights (#16440)
[HDRP] Improve shader stripping (#18640) 
Fix global scene bounds not reloaded (#18879)
[Water] ShaderGraph UX improvements (#18831) 
Path Tracer to include baked lights (UUM-3333) (#19405) 
Update foam mask texture (#19645)
APV URP implementation (#16349) 
[HDRP] Update various Yamato screenshots + fix compilation issues (#1… 
[HDRP] Add support & coverage for Tile/Cluster debug mode (#19651) 
Initial commit moving the LineRendering API out of Core SRP and into HDRP. (#19609)
[HDRP] Transparency refactor - first step (#19110)
[HDRP] Various Yamato fix (#19864) 
[HDRP] Fix Yamato screenshots - bis (#19866)
  • Loading branch information
sebastienlagarde authored and Evergreen committed Dec 19, 2022
1 parent 5ba31b8 commit 08a6a72
Show file tree
Hide file tree
Showing 291 changed files with 21,308 additions and 2,271 deletions.
Expand Up @@ -1972,6 +1972,7 @@ public static bool ModifyProfileFromLoadedData(ProbeVolumeBakingSet profile)

profile.simplificationLevels = (int)Mathf.Log(data.asset.cellSizeInBricks, 3);
profile.minDistanceBetweenProbes = data.asset.minDistanceBetweenProbes;
globalBounds = data.asset.globalBounds;
return true;
}

Expand Down
Expand Up @@ -215,13 +215,13 @@ protected void OnEnable()
public override void OnInspectorGUI()
{
var renderPipelineAssetType = GraphicsSettings.currentRenderPipelineAssetType;
if (renderPipelineAssetType != null && renderPipelineAssetType.Name == "HDRenderPipelineAsset")
if (renderPipelineAssetType != null && (renderPipelineAssetType.Name == "HDRenderPipelineAsset" || renderPipelineAssetType.Name == "UniversalRenderPipelineAsset"))
{
serializedObject.Update();

if (!ProbeReferenceVolume.instance.isInitialized || !ProbeReferenceVolume.instance.enabledBySRP)
{
EditorGUILayout.HelpBox("The probe volumes feature is disabled. The feature needs to be enabled on the used HDRP asset.", MessageType.Warning, wide: true);
EditorGUILayout.HelpBox("The probe volumes feature is disabled. The feature needs to be enabled on the used HDRP or URP asset.", MessageType.Warning, wide: true);
return;
}

Expand Down
Expand Up @@ -58,7 +58,7 @@ public override void OnInspectorGUI()
if (!ProbeReferenceVolume.instance.isInitialized || !ProbeReferenceVolume.instance.enabledBySRP)
{
var renderPipelineAssetType = GraphicsSettings.currentRenderPipelineAssetType;
if (renderPipelineAssetType != null && renderPipelineAssetType.Name == "HDRenderPipelineAsset")
if (renderPipelineAssetType != null && (renderPipelineAssetType.Name == "HDRenderPipelineAsset" || renderPipelineAssetType.Name == "UniversalRenderPipelineAsset"))
{
EditorGUILayout.HelpBox("Probe Volume is not a supported feature by this SRP.", MessageType.Error, wide: true);
}
Expand Down
Expand Up @@ -172,6 +172,8 @@ void Initialize()
var renderPipelineAssetType = GraphicsSettings.currentRenderPipelineAssetType;
if (renderPipelineAssetType != null && renderPipelineAssetType.Name == "HDRenderPipelineAsset")
apvDisabledErrorMsg += " Make sure Light Probe System is set to Probe Volumes in the HDRP asset in use.";
if (renderPipelineAssetType != null && renderPipelineAssetType.Name == "UniversalRenderPipelineAsset")
apvDisabledErrorMsg += " Make sure Light Probe System is set to Probe Volumes in the URP asset in use.";

EditorGUILayout.HelpBox(apvDisabledErrorMsg, MessageType.Error);
EditorGUILayout.Space();
Expand Down
Expand Up @@ -17,7 +17,7 @@ class VariantCounter
{
public uint inputVariants;
public uint outputVariants;
public override string ToString() => $"Total={inputVariants}/{outputVariants}({outputVariants / (float)inputVariants * 100f:0.00}%)";
/// <summary>Reports stripping results as "Total=kept/input(percent%)", e.g. "Total=12/480(2.50%)".</summary>
public override string ToString()
{
    // Percentage of input variants that survived stripping.
    float keptPercent = outputVariants / (float)inputVariants * 100f;
    return $"Total={outputVariants}/{inputVariants}({keptPercent:0.00}%)";
}
}

[Serializable]
Expand Down
Expand Up @@ -6,8 +6,6 @@
#include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl"
#include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs.hlsl"

#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/NormalBuffer.hlsl"

uniform int _ShadingMode;
uniform float _ExposureCompensation;
uniform float _ProbeSize;
Expand Down Expand Up @@ -184,12 +182,14 @@ half4 WriteFractNumber(float input, float2 texCoord)
float2 n1_uv = float2(clamp(texCoord.x*i - 2.0f, 0.0f, 1.0f), texCoord.y);
float2 n2_uv = float2(clamp(texCoord.x*i - 3.0f, 0.0f, 1.0f), texCoord.y);

half4 n0 = SampleCharacter(n0_value, n0_uv);
half4 dot = SampleCharacter(10, dot_uv);
half4 n1 = SampleCharacter(n1_value, n1_uv);
half4 n2 = SampleCharacter(n2_value, n2_uv);
if (texCoord.x <= 0.25)
return SampleCharacter(n0_value, n0_uv);
if (texCoord.x <= 0.50)
return SampleCharacter(10, dot_uv);
if (texCoord.x <= 0.75)
return SampleCharacter(n1_value, n1_uv);

return n0 * dot * n1 * n2;
return SampleCharacter(n2_value, n2_uv);
}


Expand Down
Expand Up @@ -28,9 +28,7 @@

FindSamplingData(debugPosition.xyz, debugNormal.xyz, snappedProbePosition_WS, samplingPosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeight);

float3 probePosition_WS = mul(UNITY_MATRIX_M, float4(0.0f, 0.0f, 0.0f, 1.0f)).xyz;
float3 cameraPosition_WS = _WorldSpaceCameraPos;
probePosition_WS += cameraPosition_WS;
float3 probePosition_WS = GetAbsolutePositionWS(mul(UNITY_MATRIX_M, float4(0.0f, 0.0f, 0.0f, 1.0f)).xyz);

float samplingFactor = ComputeSamplingFactor(probePosition_WS, snappedProbePosition_WS, normalizedOffset, probeDistance);

Expand Down Expand Up @@ -182,4 +180,176 @@
}
#endif

#ifdef PROBE_VOLUME_DEBUG_FUNCTION_SAMPLING
// Vertex shader for the probe-sampling debug gizmos. The instanced mesh encodes which
// gizmo a vertex belongs to in its vertex color:
//   color.z != 0 -> one of 8 QUADs showing per-probe sampling weights (quad ID in color.z)
//   color.y != 0 -> ARROW showing the debugged fragment's position/normal
//   otherwise    -> LOCATOR spheres showing the biased sampling position
v2f vert(appdata v)
{
    v2f o;

    // Debugged fragment data provided via _positionNormalBuffer: [0] = position, [1] = normal.
    float4 debugPosition = _positionNormalBuffer[0];
    float4 debugNormal = _positionNormalBuffer[1];

    float4 wsPos = float4(0.0f, 0.0f, 0.0f, 1.0f);
    float samplingFactor = 0.0f; // probe sampling weight (when needed) is computed in the vertex shader. Useful for drawing 8 debug quads showing weights

    float3 snappedProbePosition_WS; // worldspace position of main probe (a corner of the 8 probes cube)
    float3 samplingPositionNoAntiLeak_WS; // worldspace sampling position after applying 'NormalBias', 'ViewBias'
    float3 samplingPosition_WS; // worldspace sampling position after applying 'NormalBias', 'ViewBias' and 'ValidityAndNormalBased Leak Reduction'
    float probeDistance;
    float3 normalizedOffset; // normalized offset between sampling position and snappedProbePosition
    float validityWeights[8];
    float validityWeight = 1.0f;

    FindSamplingData(debugPosition.xyz, debugNormal.xyz, snappedProbePosition_WS, samplingPosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeights);

    // QUADS to write the sampling factor of each probe
    // each QUAD has an individual ID in vertex color blue channel
    if (v.color.z)
    {
        // QUAD 01 — the main probe corner; also the fallback when no other ID below matches.
        float3 quadPosition = snappedProbePosition_WS;
        validityWeight = validityWeights[0];

        // NOTE(review): quad IDs and validityWeights indices are deliberately not in the
        // same order (0,2,3,1,4,6,7,5) — presumably this matches the corner enumeration
        // used by FindSamplingData; confirm against the probe-corner layout.

        // QUAD 02
        if (abs(v.color.z - 0.2f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(0.0f, 1.0f, 0.0f) * probeDistance;
            validityWeight = validityWeights[2];
        }

        // QUAD 03
        if (abs(v.color.z - 0.3f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(1.0f, 1.0f, 0.0f) * probeDistance;
            validityWeight = validityWeights[3];
        }

        // QUAD 04
        if (abs(v.color.z - 0.4f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(1.0f, 0.0f, 0.0f) * probeDistance;
            validityWeight = validityWeights[1];
        }

        // QUAD 05
        if (abs(v.color.z - 0.5f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(0.0f, 0.0f, 1.0f) * probeDistance;
            validityWeight = validityWeights[4];
        }

        // QUAD 06
        if (abs(v.color.z - 0.6f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(0.0f, 1.0f, 1.0f) * probeDistance;
            validityWeight = validityWeights[6];
        }

        // QUAD 07
        if (abs(v.color.z - 0.7f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(1.0f, 1.0f, 1.0f) * probeDistance;
            validityWeight = validityWeights[7];
        }

        // QUAD 08
        if (abs(v.color.z - 0.8f) < 0.02f)
        {
            quadPosition = snappedProbePosition_WS + float3(1.0f, 0.0f, 1.0f) * probeDistance;
            validityWeight = validityWeights[5];
        }

        samplingFactor = ComputeSamplingFactor(quadPosition, snappedProbePosition_WS, normalizedOffset, probeDistance);

        // Billboard the quad towards the camera using the inverse-view matrix axes.
        float4 cameraUp = mul(UNITY_MATRIX_I_V, float4(0.0f, 1.0f, 0.0f, 0.0f));
        float4 cameraRight = -mul(UNITY_MATRIX_I_V, float4(1.0f, 0.0f, 0.0f, 0.0f)); // negated — presumably so the printed digits read left-to-right; TODO confirm

        wsPos = mul(UNITY_MATRIX_M, float4(0.0f, 0.0f, 0.0f, 1.0f));
        wsPos += float4(quadPosition + cameraUp.xyz * _ProbeSize / 1.5f, 0.0f); // lift the quad above the probe
        wsPos += float4((v.vertex.x * cameraRight.xyz + v.vertex.y * cameraUp.xyz * 0.5f) * 20.0f * _ProbeSize, 0.0f);
    }

    // ARROW to show the position and normal of the debugged fragment
    else if (v.color.y)
    {
        // Build an orthonormal basis whose forward axis is the debugged normal.
        float3 forward = normalize(debugNormal.xyz);
        float3 up = float3(0.0f, 1.0f, 0.0f); if (dot(up, forward) > 0.9f) { up = float3(1.0f, 0.0f, 0.0f); } // avoid a degenerate cross product when the normal is near world-up
        float3 right = normalize(cross(forward, up));
        up = cross(right, forward);
        float3x3 orientation = float3x3(
            right.x, up.x, forward.x,
            right.y, up.y, forward.y,
            right.z, up.z, forward.z);

        wsPos = float4(mul(orientation, (v.vertex.xyz * _ProbeSize * 5.0f)), 1.0f);
        wsPos = mul(UNITY_MATRIX_M, wsPos);
        wsPos.xyz += debugPosition.xyz;
    }

    // LOCATOR to debug sampling position
    else
    {
        if (v.color.x) // DEBUG NORMAL + VIEW BIAS
        {
            if (_ForceDebugNormalViewBias)
            {
                wsPos = mul(UNITY_MATRIX_M, float4(v.vertex.xyz * _ProbeSize * 1.5f, 1.0f));
                wsPos += float4(samplingPositionNoAntiLeak_WS, 0.0f);
            }
            else
            {
                // This locator variant is disabled: emit a culled (degenerate) vertex.
                DoCull(o);
                return o;
            }
        }
        else // DEBUG NORMAL + VIEW BIAS + ANTI LEAK
        {
            wsPos = mul(UNITY_MATRIX_M, float4(v.vertex.xyz * _ProbeSize * 3.0f, 1.0f));
            wsPos += float4(snappedProbePosition_WS + normalizedOffset * probeDistance, 0.0f);
        }
    }

    float4 pos = mul(UNITY_MATRIX_VP, wsPos);
    float remappedDepth = Remap(-1.0f, 1.0f, 0.6f, 1.0f, pos.z); // remapped depth to draw gizmo on top of most other objects
    o.vertex = float4(pos.x, pos.y, remappedDepth * pos.w, pos.w);
    o.normal = normalize(mul(v.normal, (float3x3)UNITY_MATRIX_M));
    o.color = v.color;
    o.texCoord = v.texCoord;
    o.samplingFactor_ValidityWeight = float2(samplingFactor, validityWeight);

    return o;
}

// Fragment shader for the probe-sampling debug gizmos. Gizmo kind is carried in the
// interpolated vertex color, matching the vertex shader's encoding:
//   color.z -> QUAD (per-probe sampling factor as printed digits)
//   color.y -> ARROW (debugged fragment position/normal)
//   else    -> LOCATOR (sampling-position spheres)
float4 frag(v2f i) : SV_Target
{
    // QUAD: render the sampling factor as a number; tint green on valid probes,
    // red on invalid ones (validity weight <= 0).
    if (i.color.z)
    {
        half4 glyph = WriteFractNumber(i.samplingFactor_ValidityWeight.x, i.texCoord);
        bool isValid = i.samplingFactor_ValidityWeight.y > 0.0f;
        half4 background = isValid ? half4(0.0f, 0.0f, 0.0f, 1.0f) : half4(1.0f, 1.0f, 1.0f, 1.0f);
        half4 ink = isValid ? half4(0.0f, 1.0f, 0.0f, 1.0f) : half4(1.0f, 0.0f, 0.0f, 1.0f);
        return lerp(background, ink, glyph.x);
    }

    // ARROW: flat debug color for the position/normal arrow.
    if (i.color.y)
        return _DebugArrowColor;

    // LOCATOR: color 02 for the NORMAL+VIEW BIAS variant, color 01 for the
    // NORMAL+VIEW BIAS+ANTI LEAK variant.
    return i.color.x ? _DebugLocator02Color : _DebugLocator01Color;
}
#endif

#endif // PROBEVOLUMEDEBUG_FUNCTIONS_HLSL
Expand Up @@ -219,8 +219,52 @@ public void RenderDebug(Camera camera)
}
}

#if UNITY_EDITOR
/// <summary>
/// Scene-view callback (registered on SceneView.duringSceneGui) that drives the
/// Probe Sampling Debug mode from user input: holding Ctrl continuously updates the
/// debugged position under the mouse; a left click without Ctrl freezes it.
/// Writes the resulting camera and screen coordinates into
/// ProbeReferenceVolume.probeSamplingDebugData.
/// </summary>
static void SceneGUI(SceneView sceneView)
{
    // APV debug needs to detect user keyboard and mouse position to update ProbeSamplingPositionDebug
    Event e = Event.current;

    // Edge-detect Ctrl: pressing it switches to continuous updates, releasing it stops them.
    // NOTE: shortcutPressed still holds last frame's state here; it is refreshed below,
    // so the order of these statements matters.
    if (e.control && !ProbeReferenceVolume.probeSamplingDebugData.shortcutPressed)
        ProbeReferenceVolume.probeSamplingDebugData.update = ProbeSamplingDebugUpdate.Always;

    if (!e.control && ProbeReferenceVolume.probeSamplingDebugData.shortcutPressed)
        ProbeReferenceVolume.probeSamplingDebugData.update = ProbeSamplingDebugUpdate.Never;

    ProbeReferenceVolume.probeSamplingDebugData.shortcutPressed = e.control;

    // Left click: with Ctrl held, capture the position once (freeze); otherwise stop updating.
    if (e.clickCount > 0 && e.button == 0)
    {
        if (ProbeReferenceVolume.probeSamplingDebugData.shortcutPressed)
            ProbeReferenceVolume.probeSamplingDebugData.update = ProbeSamplingDebugUpdate.Once;
        else
            ProbeReferenceVolume.probeSamplingDebugData.update = ProbeSamplingDebugUpdate.Never;
    }

    if (ProbeReferenceVolume.probeSamplingDebugData.update == ProbeSamplingDebugUpdate.Never)
        return;

    Vector2 screenCoordinates;

    // Either pin the debug sample to the viewport center or follow the mouse cursor.
    if (ProbeReferenceVolume.probeSamplingDebugData.forceScreenCenterCoordinates)
        screenCoordinates = new Vector2(sceneView.camera.scaledPixelWidth / 2.0f, sceneView.camera.scaledPixelHeight / 2.0f);
    else
        screenCoordinates = HandleUtility.GUIPointToScreenPixelCoordinate(e.mousePosition);

    // Ignore positions outside the scene-view camera's pixel rect.
    if (screenCoordinates.x < 0 || screenCoordinates.x > sceneView.camera.scaledPixelWidth || screenCoordinates.y < 0 || screenCoordinates.y > sceneView.camera.scaledPixelHeight)
        return;

    ProbeReferenceVolume.probeSamplingDebugData.camera = sceneView.camera;
    ProbeReferenceVolume.probeSamplingDebugData.coordinates = screenCoordinates;

}
#endif

void InitializeDebug(in ProbeVolumeSystemParameters parameters)
{
#if UNITY_EDITOR
SceneView.duringSceneGui += SceneGUI; // Used to get click and keyboard event on scene view for Probe Sampling Debug
#endif
if (parameters.supportsRuntimeDebug)
{
m_DebugMaterial = CoreUtils.CreateEngineMaterial(parameters.probeDebugShader);
Expand Down Expand Up @@ -276,6 +320,7 @@ void CleanupDebug()

#if UNITY_EDITOR
UnityEditor.Lightmapping.lightingDataCleared -= OnClearLightingdata;
SceneView.duringSceneGui -= SceneGUI;
#endif
}

Expand Down Expand Up @@ -662,6 +707,7 @@ void DrawProbeDebug(Camera camera)
{
var probeBuffer = debug.probeBuffers[i];
m_DebugMaterial.SetInt("_DebugProbeVolumeSampling", 0);
m_DebugMaterial.SetBuffer("_positionNormalBuffer", probeSamplingDebugData.positionNormalBuffer);
Graphics.DrawMeshInstanced(debugMesh, 0, m_DebugMaterial, probeBuffer, probeBuffer.Length, props, ShadowCastingMode.Off, false, 0, camera, LightProbeUsage.Off, null);
}

Expand All @@ -671,7 +717,7 @@ void DrawProbeDebug(Camera camera)
m_ProbeSamplingDebugMaterial02.SetInt("_DebugProbeVolumeSampling", 1);
props.SetFloat("_ProbeSize", probeVolumeDebug.probeSamplingDebugSize);
m_ProbeSamplingDebugMaterial02.SetBuffer("_positionNormalBuffer", probeSamplingDebugData.positionNormalBuffer);
Graphics.DrawMeshInstanced(m_DebugMesh, 0, m_ProbeSamplingDebugMaterial02, probeBuffer, probeBuffer.Length, props, ShadowCastingMode.Off, false, 0, camera, LightProbeUsage.Off, null);
Graphics.DrawMeshInstanced(debugMesh, 0, m_ProbeSamplingDebugMaterial02, probeBuffer, probeBuffer.Length, props, ShadowCastingMode.Off, false, 0, camera, LightProbeUsage.Off, null);
}

if (probeVolumeDebug.drawVirtualOffsetPush)
Expand Down
Expand Up @@ -1646,7 +1646,8 @@ public void UpdateConstantBuffer(CommandBuffer cmd, ProbeVolumeShadingParameters
shaderVars._PoolDim_CellInMeters = new Vector4(poolDim.x, poolDim.y, poolDim.z, MaxBrickSize());
shaderVars._RcpPoolDim_Padding = new Vector4(1.0f / poolDim.x, 1.0f / poolDim.y, 1.0f / poolDim.z, 0.0f);
shaderVars._Weight_MinLoadedCellInEntries = new Vector4(parameters.weight, minLoadedCellPos.x * entriesPerCell, minLoadedCellPos.y * entriesPerCell, minLoadedCellPos.z * entriesPerCell);
shaderVars._MaxLoadedCellInEntries_FrameIndex = new Vector4((maxLoadedCellPos.x + 1) * entriesPerCell - 1, (maxLoadedCellPos.y + 1) * entriesPerCell - 1, (maxLoadedCellPos.z + 1) * entriesPerCell - 1, parameters.frameIndexForNoise); shaderVars._LeakReductionParams = new Vector4((int)parameters.leakReductionMode, parameters.occlusionWeightContribution, parameters.minValidNormalWeight, 0.0f);
shaderVars._MaxLoadedCellInEntries_FrameIndex = new Vector4((maxLoadedCellPos.x + 1) * entriesPerCell - 1, (maxLoadedCellPos.y + 1) * entriesPerCell - 1, (maxLoadedCellPos.z + 1) * entriesPerCell - 1, parameters.frameIndexForNoise);
shaderVars._LeakReductionParams = new Vector4((int)parameters.leakReductionMode, parameters.occlusionWeightContribution, parameters.minValidNormalWeight, 0.0f);

// TODO: Expose this somewhere UX visible? To discuss.
shaderVars._NormalizationClamp_IndirectionEntryDim_Padding = new Vector4(parameters.reflNormalizationLowerClamp, parameters.reflNormalizationUpperClamp, GetEntrySize(), 0);
Expand Down

This file was deleted.

Expand Up @@ -48,7 +48,7 @@
// TODO: Implement density look up
return uv;
}

float2 RemapFoveatedRenderingPrevFrameDensity(float2 uv, bool yFlip = false)
{
// TODO : implement me to support eye tracking that can change the remap each frame
Expand Down Expand Up @@ -77,7 +77,7 @@

return uv;
}

float2 RemapFoveatedRenderingPrevFrameDistort(float2 uv, bool yFlip = false)
{
// TODO : implement me to support eye tracking that can change the remap each frame
Expand Down

0 comments on commit 08a6a72

Please sign in to comment.