diff --git a/com.unity.render-pipelines.universal/CHANGELOG.md b/com.unity.render-pipelines.universal/CHANGELOG.md
index cc55835262c..f018c0c0230 100644
--- a/com.unity.render-pipelines.universal/CHANGELOG.md
+++ b/com.unity.render-pipelines.universal/CHANGELOG.md
@@ -6,7 +6,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
## [Unreleased]
+### Changed
+- Setting renderingData.cameraData.isSceneViewCamera is now obsolete. Use renderingData.cameraData.cameraType instead.
+
### Fixed
+- Fixed an issue where grid lines were being drawn on top of opaque objects in the preview window [case 1240723](https://issuetracker.unity3d.com/issues/urp-grid-is-rendered-in-front-of-the-model-in-the-inspector-animation-preview-window-when-depth-or-opaque-texture-is-enabled)
+- Fixed an issue where objects in the preview window were being affected by layer mask settings in the default renderer [case 1204376](https://issuetracker.unity3d.com/issues/urp-prefab-preview-is-blank-when-a-custom-forward-renderer-data-and-default-layer-mask-is-mixed-are-used)
- Fixed issues with performance when importing fbx files
- Fixed issues with NullReferenceException happening with URP shaders
- Fixed an issue where the emission value in particle shaders would not update in the editor without entering playmode.
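
A minimal sketch (illustrative, not part of this changeset) of how a custom ScriptableRenderPass can branch on the new cameraData.cameraType instead of assigning isSceneViewCamera, since that flag is now derived from it; the pass name is hypothetical.

// Illustrative only: hypothetical custom pass using the new cameraType field.
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

class CameraTypeAwarePass : ScriptableRenderPass
{
    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        var cameraData = renderingData.cameraData;

        switch (cameraData.cameraType)
        {
            case CameraType.SceneView:
                // Editor scene view; equivalent to the old isSceneViewCamera check.
                break;
            case CameraType.Preview:
                // Inspector/asset preview camera.
                break;
            default:
                // Game, reflection and other cameras.
                break;
        }
    }
}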
diff --git a/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs b/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
index fdd3df00ec7..80a29481440 100644
--- a/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
+++ b/com.unity.render-pipelines.universal/Runtime/2D/Renderer2D.cs
@@ -150,7 +150,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
EnqueuePass(m_FinalBlitPass);
}
}
-
+
public override void SetupCullingParameters(ref ScriptableCullingParameters cullingParameters, ref CameraData cameraData)
{
cullingParameters.cullingOptions = CullingOptions.None;
diff --git a/com.unity.render-pipelines.universal/Runtime/ForwardRenderer.cs b/com.unity.render-pipelines.universal/Runtime/ForwardRenderer.cs
index afe3ffe7bb1..7c6f1ebe23e 100644
--- a/com.unity.render-pipelines.universal/Runtime/ForwardRenderer.cs
+++ b/com.unity.render-pipelines.universal/Runtime/ForwardRenderer.cs
@@ -147,6 +147,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
// We generate color LUT in the base camera only. This allows us to not break render pass execution for overlay cameras.
bool generateColorGradingLUT = anyPostProcessing && cameraData.renderType == CameraRenderType.Base;
bool isSceneViewCamera = cameraData.isSceneViewCamera;
+ bool isPreviewCamera = cameraData.isPreviewCamera;
bool requiresDepthTexture = cameraData.requiresDepthTexture;
bool isStereoEnabled = cameraData.isStereoEnabled;
@@ -155,10 +156,11 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
bool transparentsNeedSettingsPass = m_TransparentSettingsPass.Setup(ref renderingData);
// Depth prepass is generated in the following cases:
- // - Scene view camera always requires a depth texture. We do a depth pre-pass to simplify it and it shouldn't matter much for editor.
// - If game or offscreen camera requires it we check if we can copy the depth from the rendering opaques pass and use that instead.
- bool requiresDepthPrepass = isSceneViewCamera;
- requiresDepthPrepass |= (requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData));
+ // - Scene view or preview cameras always require a depth texture. We do a depth pre-pass to simplify it, and it shouldn't matter much for the editor.
+ bool requiresDepthPrepass = requiresDepthTexture && !CanCopyDepth(ref renderingData.cameraData);
+ requiresDepthPrepass |= isSceneViewCamera;
+ requiresDepthPrepass |= isPreviewCamera;
// The copying of depth should normally happen after rendering opaques.
// But if we only require it for post processing or the scene camera then we do it after rendering transparent objects
@@ -170,6 +172,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
requiresDepthPrepass = true;
bool createColorTexture = RequiresIntermediateColorTexture(ref cameraData);
+ createColorTexture &= !isPreviewCamera;
// If camera requires depth and there's no depth pre-pass we create a depth texture that can be read later by effect requiring it.
bool createDepthTexture = cameraData.requiresDepthTexture && !requiresDepthPrepass;
@@ -328,7 +331,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re
}
#if UNITY_EDITOR
- if (renderingData.cameraData.isSceneViewCamera)
+ if (isSceneViewCamera)
{
// Scene view camera should always resolve target (not stacked)
Assertions.Assert.IsTrue(lastCameraInTheStack, "Editor camera must resolve target upon finish rendering.");
@@ -446,13 +449,14 @@ bool RequiresIntermediateColorTexture(ref CameraData cameraData)
if (cameraData.renderType == CameraRenderType.Base && !cameraData.resolveFinalTarget)
return true;
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
var cameraTargetDescriptor = cameraData.cameraTargetDescriptor;
int msaaSamples = cameraTargetDescriptor.msaaSamples;
bool isStereoEnabled = cameraData.isStereoEnabled;
bool isScaledRender = !Mathf.Approximately(cameraData.renderScale, 1.0f) && !cameraData.isStereoEnabled;
bool isCompatibleBackbufferTextureDimension = cameraTargetDescriptor.dimension == TextureDimension.Tex2D;
bool requiresExplicitMsaaResolve = msaaSamples > 1 && !SystemInfo.supportsMultisampleAutoResolve;
- bool isOffscreenRender = cameraData.targetTexture != null && !cameraData.isSceneViewCamera;
+ bool isOffscreenRender = cameraData.targetTexture != null && !isSceneViewCamera;
bool isCapturing = cameraData.captureActions != null;
#if ENABLE_VR && ENABLE_VR_MODULE
@@ -464,7 +468,7 @@ bool RequiresIntermediateColorTexture(ref CameraData cameraData)
if (isOffscreenRender)
return requiresBlitForOffscreenCamera;
- return requiresBlitForOffscreenCamera || cameraData.isSceneViewCamera || isScaledRender || cameraData.isHdrEnabled ||
+ return requiresBlitForOffscreenCamera || isSceneViewCamera || isScaledRender || cameraData.isHdrEnabled ||
!isCompatibleBackbufferTextureDimension || !cameraData.isDefaultViewport || isCapturing ||
(Display.main.requiresBlitToBackbuffer && !isStereoEnabled);
}
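
A condensed sketch of the depth pre-pass decision described in the comments above (illustrative, not the literal ForwardRenderer code; canCopyDepth stands in for the renderer's private CanCopyDepth helper).

// Illustrative only: mirrors the booleans in the hunk above.
using UnityEngine.Rendering.Universal;

static class DepthPrepassExample
{
    public static bool NeedsDepthPrepass(ref CameraData cameraData, bool canCopyDepth)
    {
        // Game/offscreen cameras: pre-pass only when a depth texture is requested
        // and depth cannot simply be copied after the opaque pass.
        bool requiresDepthPrepass = cameraData.requiresDepthTexture && !canCopyDepth;

        // Scene view and preview cameras always get a depth pre-pass; the extra
        // cost is acceptable for editor-only cameras.
        requiresDepthPrepass |= cameraData.isSceneViewCamera;
        requiresDepthPrepass |= cameraData.isPreviewCamera;
        return requiresDepthPrepass;
    }
}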
diff --git a/com.unity.render-pipelines.universal/Runtime/Passes/DrawObjectsPass.cs b/com.unity.render-pipelines.universal/Runtime/Passes/DrawObjectsPass.cs
index 9f30f2d6cc9..4342c80bcf7 100644
--- a/com.unity.render-pipelines.universal/Runtime/Passes/DrawObjectsPass.cs
+++ b/com.unity.render-pipelines.universal/Runtime/Passes/DrawObjectsPass.cs
@@ -27,6 +27,7 @@ public DrawObjectsPass(string profilerTag, bool opaque, RenderPassEvent evt, Ren
m_ShaderTagIdList.Add(new ShaderTagId("UniversalForward"));
m_ShaderTagIdList.Add(new ShaderTagId("LightweightForward"));
renderPassEvent = evt;
+
m_FilteringSettings = new FilteringSettings(renderQueueRange, layerMask);
m_RenderStateBlock = new RenderStateBlock(RenderStateMask.Nothing);
m_IsOpaque = opaque;
@@ -56,10 +57,20 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData
Camera camera = renderingData.cameraData.camera;
var sortFlags = (m_IsOpaque) ? renderingData.cameraData.defaultOpaqueSortFlags : SortingCriteria.CommonTransparent;
var drawSettings = CreateDrawingSettings(m_ShaderTagIdList, ref renderingData, sortFlags);
- context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref m_FilteringSettings, ref m_RenderStateBlock);
+ var filterSettings = m_FilteringSettings;
+
+ #if UNITY_EDITOR
+ // When rendering the preview camera, we want the layer mask to be forced to Everything
+ if (renderingData.cameraData.isPreviewCamera)
+ {
+ filterSettings.layerMask = -1;
+ }
+ #endif
+
+ context.DrawRenderers(renderingData.cullResults, ref drawSettings, ref filterSettings, ref m_RenderStateBlock);
// Render objects that did not match any shader pass with error shader
- RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, m_FilteringSettings, SortingCriteria.None);
+ RenderingUtils.RenderObjectsWithError(context, ref renderingData.cullResults, camera, filterSettings, SortingCriteria.None);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
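
A sketch of the filtering pattern used above (illustrative): FilteringSettings is a struct, so the pass copies it by value and widens only the copy's layerMask, leaving the cached m_FilteringSettings untouched for game and scene view cameras. The class and helper names here are hypothetical; 'cached' stands in for m_FilteringSettings.

// Illustrative only: value-copy pattern for the preview-camera layer mask override.
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

static class PreviewFilteringExample
{
    public static FilteringSettings ForCamera(FilteringSettings cached, ref CameraData cameraData)
    {
        FilteringSettings filtering = cached;   // value copy; original is unchanged
#if UNITY_EDITOR
        // Preview cameras render with the layer mask forced to Everything (-1).
        if (cameraData.isPreviewCamera)
            filtering.layerMask = -1;
#endif
        return filtering;
    }
}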
diff --git a/com.unity.render-pipelines.universal/Runtime/Passes/FinalBlitPass.cs b/com.unity.render-pipelines.universal/Runtime/Passes/FinalBlitPass.cs
index e4a41f3aca1..fc51b14fdac 100644
--- a/com.unity.render-pipelines.universal/Runtime/Passes/FinalBlitPass.cs
+++ b/com.unity.render-pipelines.universal/Runtime/Passes/FinalBlitPass.cs
@@ -46,6 +46,7 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData
RenderTargetIdentifier cameraTarget = (cameraData.targetTexture != null) ? new RenderTargetIdentifier(cameraData.targetTexture) : BuiltinRenderTextureType.CameraTarget;
bool requiresSRGBConvertion = Display.main.requiresSrgbBlitToBackbuffer;
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
// For stereo case, eye texture always want color data in sRGB space.
// If eye texture color format is linear, we do explicit sRGB convertion
@@ -64,7 +65,7 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData
// The blit will be reworked for stereo along the XRSDK work.
Material blitMaterial = (cameraData.isStereoEnabled) ? null : m_BlitMaterial;
cmd.SetGlobalTexture("_BlitTex", m_Source.Identifier());
- if (cameraData.isStereoEnabled || cameraData.isSceneViewCamera || cameraData.isDefaultViewport)
+ if (cameraData.isStereoEnabled || isSceneViewCamera || cameraData.isDefaultViewport)
{
// This set render target is necessary so we change the LOAD state to DontCare.
cmd.SetRenderTarget(BuiltinRenderTextureType.CameraTarget,
diff --git a/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs b/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs
index 5aeef113cd4..efaba7bb9a6 100644
--- a/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs
+++ b/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs
@@ -240,6 +240,7 @@ void Render(CommandBuffer cmd, ref RenderingData renderingData)
bool tempTarget2Used = false;
int source = m_Source.id;
int destination = -1;
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
// Utilities to simplify intermediate target management
int GetSource() => source;
@@ -257,7 +258,7 @@ int GetDestination()
// Avoid using m_Source.id as new destination, it may come with a depth buffer that we don't want, may have MSAA that we don't want etc
cmd.GetTemporaryRT(ShaderConstants._TempTarget2, GetStereoCompatibleDescriptor(), FilterMode.Bilinear);
destination = ShaderConstants._TempTarget2;
- tempTarget2Used = true;
+ tempTarget2Used = true;
}
return destination;
@@ -291,7 +292,7 @@ int GetDestination()
}
// Depth of Field
- if (m_DepthOfField.IsActive() && !cameraData.isSceneViewCamera)
+ if (m_DepthOfField.IsActive() && !isSceneViewCamera)
{
var markerName = m_DepthOfField.mode.value == DepthOfFieldMode.Gaussian
? URPProfileId.GaussianDepthOfField
@@ -305,7 +306,7 @@ int GetDestination()
}
// Motion blur
- if (m_MotionBlur.IsActive() && !cameraData.isSceneViewCamera)
+ if (m_MotionBlur.IsActive() && !isSceneViewCamera)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MotionBlur)))
{
@@ -316,7 +317,7 @@ int GetDestination()
// Panini projection is done as a fullscreen pass after all depth-based effects are done
// and before bloom kicks in
- if (m_PaniniProjection.IsActive() && !cameraData.isSceneViewCamera)
+ if (m_PaniniProjection.IsActive() && !isSceneViewCamera)
{
using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.PaniniProjection)))
{
@@ -340,7 +341,7 @@ int GetDestination()
}
// Setup other effects constants
- SetupLensDistortion(m_Materials.uber, cameraData.isSceneViewCamera);
+ SetupLensDistortion(m_Materials.uber, isSceneViewCamera);
SetupChromaticAberration(m_Materials.uber);
SetupVignette(m_Materials.uber);
SetupColorGrading(cmd, ref renderingData, m_Materials.uber);
diff --git a/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs b/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
index 0ff8c24c456..c175567faba 100644
--- a/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
+++ b/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs
@@ -207,6 +207,7 @@ static void RenderSingleCamera(ScriptableRenderContext context, CameraData camer
return;
ScriptableRenderer.current = renderer;
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
ProfilingSampler sampler = (asset.debugLevel >= PipelineDebugLevel.Profiling) ? new ProfilingSampler(camera.name): _CameraProfilingSampler;
CommandBuffer cmd = CommandBufferPool.Get(sampler.name);
@@ -220,7 +221,7 @@ static void RenderSingleCamera(ScriptableRenderContext context, CameraData camer
#if UNITY_EDITOR
// Emit scene view UI
- if (cameraData.isSceneViewCamera)
+ if (isSceneViewCamera)
{
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
}
@@ -432,13 +433,14 @@ static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCa
var settings = asset;
cameraData.targetTexture = baseCamera.targetTexture;
cameraData.isStereoEnabled = IsStereoEnabled(baseCamera);
- cameraData.isSceneViewCamera = baseCamera.cameraType == CameraType.SceneView;
-
+ cameraData.cameraType = baseCamera.cameraType;
cameraData.numberOfXRPasses = 1;
cameraData.isXRMultipass = false;
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
+
#if ENABLE_VR && ENABLE_VR_MODULE
- if (cameraData.isStereoEnabled && !cameraData.isSceneViewCamera && XR.XRSettings.stereoRenderingMode == XR.XRSettings.StereoRenderingMode.MultiPass)
+ if (cameraData.isStereoEnabled && !isSceneViewCamera && XR.XRSettings.stereoRenderingMode == XR.XRSettings.StereoRenderingMode.MultiPass)
{
cameraData.numberOfXRPasses = 2;
cameraData.isXRMultipass = true;
@@ -448,7 +450,7 @@ static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCa
///////////////////////////////////////////////////////////////////
// Environment and Post-processing settings /
///////////////////////////////////////////////////////////////////
- if (cameraData.isSceneViewCamera)
+ if (isSceneViewCamera)
{
cameraData.volumeLayerMask = 1; // "Default"
cameraData.volumeTrigger = null;
@@ -525,10 +527,10 @@ static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCam
bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
cameraData.maxShadowDistance = Mathf.Min(settings.shadowDistance, camera.farClipPlane);
- cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ?
- cameraData.maxShadowDistance : 0.0f;
+ cameraData.maxShadowDistance = (anyShadowsEnabled && cameraData.maxShadowDistance >= camera.nearClipPlane) ? cameraData.maxShadowDistance : 0.0f;
- if (cameraData.isSceneViewCamera)
+ bool isSceneViewCamera = cameraData.isSceneViewCamera;
+ if (isSceneViewCamera)
{
cameraData.renderType = CameraRenderType.Base;
cameraData.clearDepth = true;
@@ -569,7 +571,7 @@ static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCam
// Disables post if GLes2
cameraData.postProcessEnabled &= SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
- cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || CheckPostProcessForDepth(cameraData);
+ cameraData.requiresDepthTexture |= isSceneViewCamera || CheckPostProcessForDepth(cameraData);
cameraData.resolveFinalTarget = resolveFinalTarget;
Matrix4x4 projectionMatrix = camera.projectionMatrix;
@@ -631,8 +633,6 @@ static void InitializeRenderingData(UniversalRenderPipelineAsset settings, ref C
InitializePostProcessingData(settings, out renderingData.postProcessingData);
renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
-
- bool isOffscreenCamera = cameraData.targetTexture != null && !cameraData.isSceneViewCamera;
renderingData.postProcessingEnabled = anyPostProcessingEnabled;
}
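
For context on the cameraType initialization above, a small illustrative sketch (names hypothetical): the stacked camera data is built from the base camera, so the whole stack reports the base camera's built-in Camera.cameraType.

// Illustrative only: Unity reports SceneView/Preview/Reflection/Game automatically,
// and URP now stores that value directly instead of a separate bool flag.
using UnityEngine;

static class CameraTypeInitExample
{
    public static CameraType ResolveStackType(Camera baseCamera)
    {
        return baseCamera.cameraType;
    }
}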
diff --git a/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs b/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
index bd835a9471c..7a4a50f7257 100644
--- a/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
+++ b/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs
@@ -74,7 +74,7 @@ public Matrix4x4 GetProjectionMatrix()
/// <summary>
/// Returns the camera GPU projection matrix. This contains platform specific changes to handle y-flip and reverse z.
- /// Similar to GL.GetGPUProjectionMatrix but queries URP internal state to know if the pipeline is rendering to render texture.
+ /// Similar to GL.GetGPUProjectionMatrix but queries URP internal state to know if the pipeline is rendering to render texture.
/// For more info on platform differences regarding camera projection check: https://docs.unity3d.com/Manual/SL-PlatformDifferences.html
/// </summary>
/// <returns></returns>
@@ -94,12 +94,26 @@ public Matrix4x4 GetGPUProjectionMatrix()
internal float aspectRatio;
public float renderScale;
public bool clearDepth;
- public bool isSceneViewCamera;
+ public CameraType cameraType;
public bool isDefaultViewport;
public bool isHdrEnabled;
public bool requiresDepthTexture;
public bool requiresOpaqueTexture;
+    /// <summary>
+    /// True if the camera rendering is for the scene window in the editor
+    /// </summary>
+ public bool isSceneViewCamera
+ {
+ get => cameraType == CameraType.SceneView;
+ set { Debug.LogWarning("Setting isSceneViewCamera is an invalid operation. Please use cameraType instead."); }
+ }
+
+    /// <summary>
+    /// True if the camera rendering is for the preview window in the editor
+    /// </summary>
+ public bool isPreviewCamera => cameraType == CameraType.Preview;
+
/// <summary>
/// True if the camera device projection matrix is flipped. This happens when the pipeline is rendering
/// to a render texture in non OpenGL platforms. If you are doing a custom Blit pass to copy camera textures
@@ -178,7 +192,7 @@ public static class ShaderPropertyId
public static readonly int viewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixVP");
public static readonly int inverseViewMatrix = Shader.PropertyToID("unity_MatrixInvV");
- // Undefined:
+ // Undefined:
// public static readonly int inverseProjectionMatrix = Shader.PropertyToID("unity_MatrixInvP");
public static readonly int inverseViewAndProjectionMatrix = Shader.PropertyToID("unity_MatrixInvVP");
@@ -346,7 +360,7 @@ static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera camera, floa
{
bool use32BitHDR = !needsAlpha && RenderingUtils.SupportsRenderTextureFormat(RenderTextureFormat.RGB111110Float);
RenderTextureFormat hdrFormat = (use32BitHDR) ? RenderTextureFormat.RGB111110Float : RenderTextureFormat.DefaultHDR;
-
+
desc.colorFormat = isHdrEnabled ? hdrFormat : renderTextureFormatDefault;
desc.depthBufferBits = 32;
desc.msaaSamples = msaaSamples;
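
Finally, a small illustrative sketch (not the literal struct) of how the new CameraData members added above behave: the editor-camera flags are pure views over cameraType, and the legacy isSceneViewCamera setter only logs a warning. Constructing a CameraData value directly is an assumption for the demo; in real passes it arrives via RenderingData.

// Illustrative only: demonstrates the derived flags and the warning-only setter.
using UnityEngine;
using UnityEngine.Rendering.Universal;

static class CameraDataFlagsExample
{
    public static void Demo()
    {
        var cameraData = new CameraData { cameraType = CameraType.Preview };

        Debug.Log(cameraData.isPreviewCamera);    // true  (cameraType == CameraType.Preview)
        Debug.Log(cameraData.isSceneViewCamera);  // false (cameraType != CameraType.SceneView)

        // The setter no longer stores anything; it only logs a warning.
        cameraData.isSceneViewCamera = true;
        Debug.Log(cameraData.isSceneViewCamera);  // still false
    }
}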