@@ -440,7 +440,7 @@ public static bool BakeProbes(IEnumerable<HDProbe> bakedProbes)
// to update the texture.
// updateCount is a transient data, so don't execute this code before the asset reload.
{
- UnityEngine.Random.InitState((int)(1000 * hdPipeline.GetTime()));
+ UnityEngine.Random.InitState((int)(1000 * EditorApplication.timeSinceStartup));
foreach (var probe in bakedProbes)
{
var c = UnityEngine.Random.Range(2, 10);
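The change above swaps the pipeline's own clock for `EditorApplication.timeSinceStartup` when seeding the random stagger applied to baked probes — presumably because, with this PR, pipeline time becomes per-camera and can sit at 0 when "Animated Materials" is disabled, while the editor clock always advances. A minimal, self-contained sketch of the same seeding pattern (the `ProbeBakeJitter` helper and its method name are hypothetical):

```csharp
#if UNITY_EDITOR
using System.Collections.Generic;
using UnityEditor;
using UnityEngine;

static class ProbeBakeJitter
{
    // Seed once from the editor clock, then draw a small per-probe frame delay
    // so freshly baked probes do not all refresh their textures on the same frame.
    public static List<int> UpdateDelays(int probeCount)
    {
        Random.InitState((int)(1000 * EditorApplication.timeSinceStartup));

        var delays = new List<int>(probeCount);
        for (int i = 0; i < probeCount; i++)
            delays.Add(Random.Range(2, 10)); // same 2..9 frame window as the hunk above
        return delays;
    }
}
#endif
```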
(next file)
@@ -196,7 +196,6 @@ internal bool HasValidRenderedData()
}
else
{
- bool hasEverRendered = lastRenderedFrame != int.MinValue;
return hasEverRendered && hasValidTexture;
}
}
@@ -520,16 +519,16 @@ internal Matrix4x4 proxyToWorld
: influenceToWorld;

internal bool wasRenderedAfterOnEnable { get; private set; } = false;
- internal int lastRenderedFrame { get; private set; } = int.MinValue;
+ internal bool hasEverRendered { get; private set; } = false;

- internal void SetIsRendered(int frame)
+ internal void SetIsRendered()
{
#if UNITY_EDITOR
m_WasRenderedDuringAsyncCompilation = ShaderUtil.anythingCompiling;
#endif
m_WasRenderedSinceLastOnDemandRequest = true;
wasRenderedAfterOnEnable = true;
- lastRenderedFrame = frame;
+ hasEverRendered = true;
}

// API
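With the per-frame bookkeeping gone, the probe only needs to remember *that* it has been rendered, not *when*: the `lastRenderedFrame != int.MinValue` sentinel becomes an explicit `hasEverRendered` flag. A minimal sketch of the simplified state (class name hypothetical; the members mirror the hunk above):

```csharp
// Sketch: replacing a sentinel frame index with an explicit flag.
class ProbeRenderState
{
    public bool wasRenderedAfterOnEnable { get; private set; }
    public bool hasEverRendered { get; private set; } // was: lastRenderedFrame != int.MinValue

    // No frame index needed any more.
    public void SetIsRendered()
    {
        wasRenderedAfterOnEnable = true;
        hasEverRendered = true;
    }

    public void OnEnable() => wasRenderedAfterOnEnable = false;
}
```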
(next file)
@@ -10,7 +10,7 @@ TextureHandle CreateAmbientOcclusionTexture(RenderGraph renderGraph)
return renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) { enableRandomWrite = true, colorFormat = GraphicsFormat.R8_UNorm, name = "Ambient Occlusion" });
}

- public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectors, int frameCount, in HDUtils.PackedMipChainInfo depthMipInfo, ShaderVariablesRaytracing shaderVariablesRaytracing, TextureHandle rayCountTexture)
+ public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthPyramid, TextureHandle normalBuffer, TextureHandle motionVectors, in HDUtils.PackedMipChainInfo depthMipInfo, ShaderVariablesRaytracing shaderVariablesRaytracing, TextureHandle rayCountTexture)
{
var settings = hdCamera.volumeStack.GetComponent<AmbientOcclusion>();

@@ -31,7 +31,7 @@ public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureH
hdCamera.AllocateAmbientOcclusionHistoryBuffer(scaleFactor);

if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) && settings.rayTracing.value)
- return m_RaytracingAmbientOcclusion.RenderRTAO(renderGraph, hdCamera, depthPyramid, normalBuffer, motionVectors, rayCountTexture, frameCount, shaderVariablesRaytracing);
+ return m_RaytracingAmbientOcclusion.RenderRTAO(renderGraph, hdCamera, depthPyramid, normalBuffer, motionVectors, rayCountTexture, shaderVariablesRaytracing);
else
{
var historyRT = hdCamera.GetCurrentFrameRT((int)HDCameraFrameHistoryType.AmbientOcclusion);
@@ -42,7 +42,7 @@ public TextureHandle Render(RenderGraph renderGraph, HDCamera hdCamera, TextureH
historyRT.referenceSize.y * historyRT.scaleFactor.y);
var rtScaleForHistory = hdCamera.historyRTHandleProperties.rtHandleScale;

- var aoParameters = PrepareRenderAOParameters(hdCamera, historySize * rtScaleForHistory, frameCount, depthMipInfo);
+ var aoParameters = PrepareRenderAOParameters(hdCamera, historySize * rtScaleForHistory, depthMipInfo);

var packedData = RenderAO(renderGraph, aoParameters, depthPyramid, normalBuffer);
result = DenoiseAO(renderGraph, aoParameters, depthPyramid, motionVectors, packedData, currentHistory, outputHistory);
(next file)
@@ -281,7 +281,7 @@ struct RenderAOParameters
public ShaderVariablesAmbientOcclusion cb;
}

- RenderAOParameters PrepareRenderAOParameters(HDCamera camera, Vector2 historySize, int frameCount, in HDUtils.PackedMipChainInfo depthMipInfo)
+ RenderAOParameters PrepareRenderAOParameters(HDCamera camera, Vector2 historySize, in HDUtils.PackedMipChainInfo depthMipInfo)
{
var parameters = new RenderAOParameters();

@@ -304,6 +304,7 @@ RenderAOParameters PrepareRenderAOParameters(HDCamera camera, Vector2 historySiz

float invHalfTanFOV = -camera.mainViewConstants.projMatrix[1, 1];
float aspectRatio = parameters.runningRes.y / parameters.runningRes.x;
+ uint frameCount = camera.GetCameraFrameCount();

cb._AOParams0 = new Vector4(
parameters.fullResolution ? 0.0f : 1.0f,
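Rather than plumbing an `int frameCount` argument through `Render` and `PrepareRenderAOParameters`, the ambient-occlusion code now asks the camera directly via `camera.GetCameraFrameCount()`. A rough sketch of what such a per-camera counter amounts to (names other than `GetCameraFrameCount` are hypothetical):

```csharp
// Sketch: a frame counter owned by the camera wrapper instead of the pipeline.
// Systems that already receive the camera no longer need an extra parameter.
class CameraFrameClock
{
    uint m_CameraFrameCount;

    // Called once each time this particular camera is rendered.
    public void Tick() => m_CameraFrameCount++;

    public uint GetCameraFrameCount() => m_CameraFrameCount;
}

// Typical consumer, mirroring the AO hunk above:
//   uint frameCount = camera.GetCameraFrameCount();
//   // ...use frameCount for temporal jitter / sample rotation...
```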
(next file)
@@ -89,13 +89,12 @@ public DensityVolumeArtistParameters(Color color, float _meanFreePath, float _an
m_EditorAdvancedFade = false;
}

- internal void Update(bool animate, float time)
+ internal void Update(float time)
{
//Update scrolling based on deltaTime
if (volumeMask != null)
{
- float animationTime = animate ? time : 0.0f;
- textureOffset = (textureScrollingSpeed * animationTime);
+ textureOffset = (textureScrollingSpeed * time);
// Switch from right-handed to left-handed coordinate system.
textureOffset.x = -textureOffset.x;
textureOffset.y = -textureOffset.y;
@@ -180,7 +179,7 @@ public partial class DensityVolume : MonoBehaviour


/// <summary>Gather and Update any parameters that may have changed.</summary>
- internal void PrepareParameters(bool animate, float time)
+ internal void PrepareParameters(float time)
{
//Texture has been updated notify the manager
bool updated = previousVolumeMask != parameters.volumeMask;
@@ -201,7 +200,7 @@ internal void PrepareParameters(bool animate, float time)
#endif
}

- parameters.Update(animate, time);
+ parameters.Update(time);
}

private void NotifyUpdatedTexure()
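`DensityVolumeArtistParameters.Update` loses its `animate` flag: callers now pass the camera's time, and (per the HDCamera hunk further down) that time is already forced to 0 when "Animated Materials" is off, so texture scrolling simply freezes. A small self-contained sketch of the offset update (struct and field names are illustrative; the sign flips mirror the hunk above):

```csharp
using UnityEngine;

// Sketch: scrolling offset driven directly by per-camera time.
// time == 0 (animation disabled) naturally yields a zero offset.
struct VolumeScrollState
{
    public Vector3 textureScrollingSpeed;
    public Vector3 textureOffset;

    public void Update(float time)
    {
        textureOffset = textureScrollingSpeed * time;
        // Switch from right-handed to left-handed coordinate system.
        textureOffset.x = -textureOffset.x;
        textureOffset.y = -textureOffset.y;
    }
}
```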
(next file)
@@ -105,12 +105,12 @@ public void DeRegisterVolume(DensityVolume volume)

public bool ContainsVolume(DensityVolume volume) => m_Volumes.Contains(volume);

- public List<DensityVolume> PrepareDensityVolumeData(CommandBuffer cmd, HDCamera currentCam, float time)
+ public List<DensityVolume> PrepareDensityVolumeData(CommandBuffer cmd, HDCamera currentCam)
{
//Update volumes
- bool animate = currentCam.animateMaterials;
+ float time = currentCam.time;
foreach (DensityVolume volume in m_Volumes)
- volume.PrepareParameters(animate, time);
+ volume.PrepareParameters(time);

using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.UpdateDensityVolumeAtlas)))
{
(next file)
@@ -719,8 +719,8 @@ void UpdateShaderVariablesGlobalVolumetrics(ref ShaderVariablesGlobal cb, HDCame

// Get the interpolated anisotropy value.
var fog = hdCamera.volumeStack.GetComponent<Fog>();
- int frameIndex = m_FrameCount;
- int currIdx = (frameIndex + 0) & 1;
+ uint frameIndex = hdCamera.GetCameraFrameCount();
+ uint currIdx = (frameIndex + 0) & 1;

var currParams = hdCamera.vBufferParams[currIdx];

@@ -744,7 +744,7 @@ void UpdateShaderVariablesGlobalVolumetrics(ref ShaderVariablesGlobal cb, HDCame
cb._VBufferRcpInstancedViewCount = 1.0f / hdCamera.viewCount;
}

- DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuffer cmd, float time)
+ DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuffer cmd)
{
DensityVolumeList densityVolumes = new DensityVolumeList();

@@ -765,7 +765,7 @@ DensityVolumeList PrepareVisibleDensityVolumeList(HDCamera hdCamera, CommandBuff
m_VisibleVolumeData.Clear();

// Collect all visible finite volume data, and upload it to the GPU.
- var volumes = DensityVolumeManager.manager.PrepareDensityVolumeData(cmd, hdCamera, time);
+ var volumes = DensityVolumeManager.manager.PrepareDensityVolumeData(cmd, hdCamera);

for (int i = 0; i < Math.Min(volumes.Count, k_MaxVisibleDensityVolumeCount); i++)
{
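The volumetric buffer keeps two parameter sets and alternates between them by frame parity; reading the index from `hdCamera.GetCameraFrameCount()` keeps that alternation correct per camera rather than tying it to a pipeline-wide counter. A minimal sketch of the ping-pong indexing implied by `(frameIndex + 0) & 1` (the generic wrapper is hypothetical; treating the other slot as the previous frame's data follows the usual double-buffering convention):

```csharp
// Sketch: double-buffered data selected by frame parity.
class FrameParityBuffer<T>
{
    readonly T[] m_Slots = new T[2];

    public ref T Current(uint frameIndex)  => ref m_Slots[(frameIndex + 0) & 1];
    public ref T Previous(uint frameIndex) => ref m_Slots[(frameIndex + 1) & 1];
}

// Usage sketch: var currParams = vBufferParams.Current(hdCamera.GetCameraFrameCount());
```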
(next file)
@@ -3421,7 +3421,7 @@ static void DoFinalPass(in FinalPassParameters parameters,
#if HDRP_DEBUG_STATIC_POSTFX
int textureId = 0;
#else
- int textureId = Time.frameCount % blueNoiseTexture.depth;
+ int textureId = (int)hdCamera.GetCameraFrameCount() % blueNoiseTexture.depth;
#endif

finalPassMaterial.EnableKeyword("DITHER");
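The dither in the final pass cycles through the slices of a blue-noise texture array; indexing by the camera's frame count (instead of the global `Time.frameCount`) keeps the sequence stable per camera, and the `HDRP_DEBUG_STATIC_POSTFX` path pins it to slice 0. The selection itself is just a modulo, sketched below (helper name hypothetical):

```csharp
// Sketch: pick a blue-noise slice for this camera's current frame.
// blueNoiseSliceCount corresponds to blueNoiseTexture.depth in the hunk above.
static int SelectDitherSlice(uint cameraFrameCount, int blueNoiseSliceCount)
{
    return (int)(cameraFrameCount % (uint)blueNoiseSliceCount);
}
```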
(next file)
@@ -246,6 +246,8 @@ internal struct HistoryEffectValidity
// XR multipass and instanced views are supported (see XRSystem)
internal XRPass xr { get; private set; }

+ internal float deltaTime => time - lastTime;

// Non oblique projection matrix (RHS)
// TODO: this code is never used and not compatible with XR
internal Matrix4x4 nonObliqueProjMatrix
@@ -453,9 +455,24 @@ internal void Update(FrameSettings currentFrameSettings, HDRenderPipeline hdrp,

// Different views/tabs may have different values of the "Animated Materials" setting.
animateMaterials = CoreUtils.AreAnimatedMaterialsEnabled(aniCam);

- time = animateMaterials ? hdrp.GetTime() : 0;
- lastTime = animateMaterials ? hdrp.GetLastTime() : 0;
+ if (animateMaterials)
+ {
+ float newTime, deltaTime;
+ #if UNITY_EDITOR
+ newTime = Application.isPlaying ? Time.time : Time.realtimeSinceStartup;
+ deltaTime = Application.isPlaying ? Time.deltaTime : 0.033f;
+ #else
+ newTime = Time.time;
+ deltaTime = Time.deltaTime;
+ #endif
+ time = newTime;
+ lastTime = newTime - deltaTime;
+ }
+ else
+ {
+ time = 0;
+ lastTime = 0;
+ }

// Make sure that the shadow history identification array is allocated and is at the right size
if (shadowHistoryUsage == null || shadowHistoryUsage.Length != hdrp.currentPlatformRenderPipelineSettings.hdShadowInitParams.maxScreenSpaceShadowSlots)
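The replacement block above derives camera time without going through the pipeline: outside play mode `Time.time` does not advance, so the editor path falls back to `Time.realtimeSinceStartup` with a fixed ~33 ms step, and disabling "Animated Materials" zeroes both `time` and `lastTime`. The same logic as a standalone helper (class and method names hypothetical):

```csharp
using UnityEngine;

static class CameraAnimationTime
{
    // Returns (time, lastTime) for material animation on a given camera,
    // mirroring the hunk above.
    public static (float time, float lastTime) Sample(bool animateMaterials)
    {
        if (!animateMaterials)
            return (0f, 0f);

        float newTime, deltaTime;
#if UNITY_EDITOR
        // Scene-view style rendering: Time.time/deltaTime are only meaningful in play mode.
        newTime   = Application.isPlaying ? Time.time : Time.realtimeSinceStartup;
        deltaTime = Application.isPlaying ? Time.deltaTime : 0.033f;
#else
        newTime   = Time.time;
        deltaTime = Time.deltaTime;
#endif
        return (newTime, newTime - deltaTime);
    }
}
```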
@@ -674,6 +691,9 @@ internal static void ResetAllHistoryRTHandleSystems(int width, int height)
}
}

+ unsafe internal void UpdateShaderVariablesGlobalCB(ref ShaderVariablesGlobal cb)
+ => UpdateShaderVariablesGlobalCB(ref cb, (int)cameraFrameCount);

unsafe internal void UpdateShaderVariablesGlobalCB(ref ShaderVariablesGlobal cb, int frameCount)
{
bool taaEnabled = frameSettings.IsEnabled(FrameSettingsField.Postprocess)
@@ -708,8 +728,13 @@ unsafe internal void UpdateShaderVariablesGlobalCB(ref ShaderVariablesGlobal cb,

float ct = time;
float pt = lastTime;
+ #if UNITY_EDITOR
+ float dt = time - lastTime;
+ float sdt = dt;
+ #else
float dt = Time.deltaTime;
float sdt = Time.smoothDeltaTime;
+ #endif

cb._Time = new Vector4(ct * 0.05f, ct, ct * 2.0f, ct * 3.0f);
cb._SinTime = new Vector4(Mathf.Sin(ct * 0.125f), Mathf.Sin(ct * 0.25f), Mathf.Sin(ct * 0.5f), Mathf.Sin(ct));
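With time now owned by the camera, the editor branch above computes the frame delta from `time - lastTime` (and reuses it as the smoothed delta) instead of reading `Time.deltaTime`, which is unrelated to the camera being drawn outside play mode. For reference, these values feed the familiar shader time vectors; a sketch of the standard packing (the scales follow Unity's documented `_Time`/`_SinTime`/`_CosTime` convention — the exact HDRP constant-buffer layout is not shown here):

```csharp
using UnityEngine;

struct ShaderTimeVectors
{
    public Vector4 time, sinTime, cosTime, deltaTime;

    public static ShaderTimeVectors Pack(float t, float dt, float sdt) => new ShaderTimeVectors
    {
        time      = new Vector4(t * 0.05f, t, t * 2.0f, t * 3.0f),
        sinTime   = new Vector4(Mathf.Sin(t * 0.125f), Mathf.Sin(t * 0.25f), Mathf.Sin(t * 0.5f), Mathf.Sin(t)),
        cosTime   = new Vector4(Mathf.Cos(t * 0.125f), Mathf.Cos(t * 0.25f), Mathf.Cos(t * 0.5f), Mathf.Cos(t)),
        deltaTime = new Vector4(dt, 1.0f / dt, sdt, 1.0f / sdt),
    };
}
```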
(next file)
@@ -227,7 +227,6 @@ BuildGPULightListOutput BuildGPULightList(RenderGraph render
class PushGlobalCameraParamPassData
{
public HDCamera hdCamera;
- public int frameCount;
public ShaderVariablesGlobal globalCB;
public ShaderVariablesXR xrCB;
}
@@ -237,14 +236,13 @@ void PushGlobalCameraParams(RenderGraph renderGraph, HDCamera hdCamera)
using (var builder = renderGraph.AddRenderPass<PushGlobalCameraParamPassData>("Push Global Camera Parameters", out var passData))
{
passData.hdCamera = hdCamera;
- passData.frameCount = m_FrameCount;
passData.globalCB = m_ShaderVariablesGlobalCB;
passData.xrCB = m_ShaderVariablesXRCB;

builder.SetRenderFunc(
(PushGlobalCameraParamPassData data, RenderGraphContext context) =>
{
- data.hdCamera.UpdateShaderVariablesGlobalCB(ref data.globalCB, data.frameCount);
+ data.hdCamera.UpdateShaderVariablesGlobalCB(ref data.globalCB);
ConstantBuffer.PushGlobal(context.cmd, data.globalCB, HDShaderIDs._ShaderVariablesGlobal);
data.hdCamera.UpdateShaderVariablesXRCB(ref data.xrCB);
ConstantBuffer.PushGlobal(context.cmd, data.xrCB, HDShaderIDs._ShaderVariablesXR);
@@ -442,7 +440,7 @@ TextureHandle RenderSSR(RenderGraph renderGraph,
{
result = RenderRayTracedReflections(renderGraph, hdCamera,
prepassOutput.depthBuffer, prepassOutput.stencilBuffer, prepassOutput.normalBuffer, prepassOutput.resolvedMotionVectorsBuffer, clearCoatMask, skyTexture, rayCountTexture,
- m_FrameCount, m_ShaderVariablesRayTracingCB, transparent);
+ m_ShaderVariablesRayTracingCB, transparent);
}
else
{
(next file)
@@ -30,7 +30,7 @@ void ExecuteWithRenderGraph(RenderRequest renderRequest,
{
scriptableRenderContext = renderContext,
commandBuffer = commandBuffer,
- currentFrameIndex = GetFrameCount()
+ currentFrameIndex = m_FrameCount
Review thread:

Contributor:
Is it intended to still use m_FrameCount here and not the one from the camera?

adrien-de-tocqueville (Contributor, author) — Jan 19, 2021:
It's intended. I investigated a bit how the frame count is used by the render graph, and it seems it should not be camera dependent, but you may want to confirm 😄

Contributor:
I will take another look and try to confirm then.

Contributor:
I think the change is OK, yes :)

};

m_RenderGraph.Begin(renderGraphParams);
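Regarding the review thread above: the render graph's `currentFrameIndex` stays on the pipeline-wide `m_FrameCount`, while the rest of this PR moves per-camera logic onto `GetCameraFrameCount()`. The two counters advance differently — the pipeline ticks once per render, each camera only when it is rendered — as the sketch below illustrates (all names except those quoted from the diff are hypothetical):

```csharp
using System.Collections.Generic;

// Sketch: a pipeline-wide frame counter vs. per-camera frame counters.
class FrameCounters
{
    public int pipelineFrameCount { get; private set; }   // plays the role of m_FrameCount
    readonly Dictionary<int, uint> m_PerCameraFrameCount = new Dictionary<int, uint>();

    // Called once per pipeline render (e.g. render-graph execution).
    public void OnPipelineRender() => pipelineFrameCount++;

    // Called only when the given camera is actually rendered this frame.
    public uint OnCameraRender(int cameraId)
    {
        m_PerCameraFrameCount.TryGetValue(cameraId, out uint count);
        m_PerCameraFrameCount[cameraId] = ++count;
        return count;                                      // analogous to GetCameraFrameCount()
    }
}
```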
@@ -104,7 +104,7 @@ void ExecuteWithRenderGraph(RenderRequest renderRequest,
{
gpuLightListOutput = BuildGPULightList(m_RenderGraph, hdCamera, m_TileAndClusterData, m_TotalLightCount, ref m_ShaderVariablesLightListCB, prepassOutput.depthBuffer, prepassOutput.stencilBuffer, prepassOutput.gbuffer);

- lightingBuffers.ambientOcclusionBuffer = m_AmbientOcclusionSystem.Render(m_RenderGraph, hdCamera, prepassOutput.depthPyramidTexture, prepassOutput.resolvedNormalBuffer, prepassOutput.resolvedMotionVectorsBuffer, m_FrameCount, m_DepthBufferMipChainInfo, m_ShaderVariablesRayTracingCB, rayCountTexture);
+ lightingBuffers.ambientOcclusionBuffer = m_AmbientOcclusionSystem.Render(m_RenderGraph, hdCamera, prepassOutput.depthPyramidTexture, prepassOutput.resolvedNormalBuffer, prepassOutput.resolvedMotionVectorsBuffer, m_DepthBufferMipChainInfo, m_ShaderVariablesRayTracingCB, rayCountTexture);
// Should probably be inside the AO render function but since it's a separate class it's currently not super clean to do.
PushFullScreenDebugTexture(m_RenderGraph, lightingBuffers.ambientOcclusionBuffer, FullScreenDebugMode.ScreenSpaceAmbientOcclusion);

@@ -135,7 +135,7 @@ void ExecuteWithRenderGraph(RenderRequest renderRequest,
case IndirectDiffuseMode.Raytrace:
lightingBuffers.ssgiLightingBuffer = RenderRayTracedIndirectDiffuse(m_RenderGraph, hdCamera,
prepassOutput.depthBuffer, prepassOutput.stencilBuffer, prepassOutput.normalBuffer, prepassOutput.resolvedMotionVectorsBuffer, m_SkyManager.GetSkyReflection(hdCamera), rayCountTexture,
- m_FrameCount, m_ShaderVariablesRayTracingCB);
+ m_ShaderVariablesRayTracingCB);
break;
default:
lightingBuffers.ssgiLightingBuffer = m_RenderGraph.defaultResources.blackTextureXR;
@@ -1117,7 +1117,6 @@ class PreRenderSkyPassData
public TextureHandle normalBuffer;
public DebugDisplaySettings debugDisplaySettings;
public SkyManager skyManager;
- public int frameCount;
}

void PreRenderSky(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle depthStencilBuffer, TextureHandle normalbuffer)
@@ -1135,12 +1134,11 @@ void PreRenderSky(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colo
passData.normalBuffer = builder.WriteTexture(normalbuffer);
passData.debugDisplaySettings = m_CurrentDebugDisplaySettings;
passData.skyManager = m_SkyManager;
- passData.frameCount = m_FrameCount;

builder.SetRenderFunc(
(PreRenderSkyPassData data, RenderGraphContext context) =>
{
- data.skyManager.PreRenderSky(data.hdCamera, data.sunLight, data.colorBuffer, data.normalBuffer, data.depthStencilBuffer, data.debugDisplaySettings, data.frameCount, context.cmd);
+ data.skyManager.PreRenderSky(data.hdCamera, data.sunLight, data.colorBuffer, data.normalBuffer, data.depthStencilBuffer, data.debugDisplaySettings, context.cmd);
});
}
}
@@ -1157,7 +1155,6 @@ class RenderSkyPassData
public TextureHandle intermediateBuffer;
public DebugDisplaySettings debugDisplaySettings;
public SkyManager skyManager;
- public int frameCount;
}

void RenderSky(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle volumetricLighting, TextureHandle depthStencilBuffer, TextureHandle depthTexture)
@@ -1177,15 +1174,14 @@ void RenderSky(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBu
passData.intermediateBuffer = builder.CreateTransientTexture(colorBuffer);
passData.debugDisplaySettings = m_CurrentDebugDisplaySettings;
passData.skyManager = m_SkyManager;
- passData.frameCount = m_FrameCount;

builder.SetRenderFunc(
(RenderSkyPassData data, RenderGraphContext context) =>
{
// Necessary to perform dual-source (polychromatic alpha) blending which is not supported by Unity.
// We load from the color buffer, perform blending manually, and store to the atmospheric scattering buffer.
// Then we perform a copy from the atmospheric scattering buffer back to the color buffer.
- data.skyManager.RenderSky(data.hdCamera, data.sunLight, data.colorBuffer, data.depthStencilBuffer, data.debugDisplaySettings, data.frameCount, context.cmd);
+ data.skyManager.RenderSky(data.hdCamera, data.sunLight, data.colorBuffer, data.depthStencilBuffer, data.debugDisplaySettings, context.cmd);

if (Fog.IsFogEnabled(data.hdCamera) || Fog.IsPBRFogEnabled(data.hdCamera))
{
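The sky passes follow the same pattern as the rest of the PR: the `frameCount` field disappears from the pass data because `SkyManager` can read whatever frame index it needs from the `HDCamera` it already receives. In render-graph terms, the pass-data object shrinks to only what the lambda cannot obtain from its other captures — a generic sketch of that refactor (types and names hypothetical):

```csharp
// Sketch: a value formerly plumbed as a parameter becomes a property of an
// object that is already passed around, so pass data and call sites shrink.
class CameraLike
{
    public uint frameCount { get; private set; }
    public void Tick() => frameCount++;
}

class SkyPassData
{
    public CameraLike camera;
    // public int frameCount;  // <- no longer needed; read camera.frameCount instead
}

static class SkyPass
{
    public static void Render(SkyPassData data)
    {
        uint frameIndex = data.camera.frameCount; // used e.g. for temporal jitter
        // ...render sky using frameIndex...
    }
}
```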