From d84c56227e52ad504e9a176b8a6cb9742b6c2c67 Mon Sep 17 00:00:00 2001 From: Unity Technologies <@unity.com> Date: Thu, 18 Mar 2021 19:22:06 +0100 Subject: [PATCH] com.unity.render-pipelines.core@11.0.0 in Unity 2021.1.0f1 ## [11.0.0] - 2020-10-21 ### Added - Support for the PlayStation 5 platform has been added. - Support for the XboxSeries platform has been added. ### Fixed - Fixed the default background color for previews to use the original color. - Fixed a bug in FreeCamera which would only provide a speed boost for the first frame when pressing the Shift key. - Fixed spacing between property fields on the Volume Component Editors. - Fixed ALL/NONE to maintain the state on the Volume Component Editors. - Fixed the selection of the Additional properties from ALL/NONE when the option "Show additional properties" is disabled - Fixed ACES tonemapping for Nintendo Switch by forcing some shader color conversion functions to full float precision. - Fixed missing warning UI about Projector component being unsupported (case 1300327). 
- Fixed the display name of a Volume Parameter when the InspectorName attribute is defined - Fixed crash on VolumeComponentWithQualityEditor when the current Pipeline is not HDRP --- CHANGELOG.md | 16 +- Documentation~/TableOfContents.md | 2 +- Documentation~/custom-material-inspector.md | 50 ++++++ Documentation~/render-graph-benefits.md | 4 +- Documentation~/render-graph-fundamentals.md | 18 +- Documentation~/render-graph-system.md | 8 +- .../render-graph-writing-a-render-pipeline.md | 75 +++------ .../rthandle-system-fundamentals.md | 16 +- Documentation~/rthandle-system-using.md | 64 +++---- Documentation~/rthandle-system.md | 5 +- Editor/CoreEditorUtils.cs | 21 +++ Editor/Gizmo/GizmoUtility.cs | 26 +++ Editor/Gizmo/GizmoUtility.cs.meta | 11 ++ Editor/Gizmo/HierarchicalBox.cs | 23 ++- Editor/LookDev/CameraController.cs | 22 ++- Editor/LookDev/ComparisonGizmoController.cs | 8 +- Editor/LookDev/Compositor.cs | 16 +- Editor/LookDev/DisplayWindow.cs | 24 ++- Editor/LookDev/EnvironmentLibrary.cs | 25 ++- Editor/LookDev/LookDev.cs | 33 +++- Editor/LookDev/Stage.cs | 8 +- Editor/MaterialUpgrader.cs | 2 +- Editor/ProjectorEditor.cs | 23 +++ Editor/ProjectorEditor.cs.meta | 3 + Editor/Volume/Drawers/IntParameterDrawer.cs | 16 ++ Editor/Volume/VolumeComponentEditor.cs | 154 ++++++++++++++--- Editor/Volume/VolumeComponentListEditor.cs | 10 +- Runtime/Camera/FreeCamera.cs | 8 +- Runtime/Common/CoreAttributes.cs | 8 + Runtime/RenderGraph/RenderGraph.cs | 2 + .../RenderGraphResourceComputeBuffer.cs | 2 +- .../RenderGraph/RenderGraphResourcePool.cs | 18 +- .../RenderGraph/RenderGraphResourceTexture.cs | 2 +- Runtime/Textures/RTHandleSystem.cs | 14 +- Runtime/Textures/TextureXR.cs | 1 + Runtime/Utilities/CameraCaptureBridge.cs | 79 +-------- .../CoreRenderPipelinePreferences.cs | 31 +++- Runtime/Utilities/CoreUtils.cs | 29 +++- Runtime/Volume/VolumeComponent.cs | 24 ++- ShaderLibrary/Common.hlsl | 10 +- ShaderLibrary/GeometricTools.hlsl | 39 ++++- ShaderLibrary/Texture.hlsl | 8 
+- ShaderLibrary/UnityInstancing.hlsl | 2 +- ShaderLibrary/Version.hlsl | 3 + Tests/Editor/ReflectionUtils.cs | 79 +++++++++ Tests/Editor/ReflectionUtils.cs.meta | 11 ++ Tests/Editor/Volumes.meta | 8 + Tests/Editor/Volumes/VolumeComponentTests.cs | 159 ++++++++++++++++++ .../Volumes/VolumeComponentTests.cs.meta | 11 ++ package.json | 3 +- 50 files changed, 915 insertions(+), 319 deletions(-) create mode 100644 Documentation~/custom-material-inspector.md create mode 100644 Editor/Gizmo/GizmoUtility.cs create mode 100644 Editor/Gizmo/GizmoUtility.cs.meta create mode 100644 Editor/ProjectorEditor.cs create mode 100644 Editor/ProjectorEditor.cs.meta create mode 100644 Tests/Editor/ReflectionUtils.cs create mode 100644 Tests/Editor/ReflectionUtils.cs.meta create mode 100644 Tests/Editor/Volumes.meta create mode 100644 Tests/Editor/Volumes/VolumeComponentTests.cs create mode 100644 Tests/Editor/Volumes/VolumeComponentTests.cs.meta diff --git a/CHANGELOG.md b/CHANGELOG.md index c79af6b..a3cd907 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,8 +6,20 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [11.0.0] - 2020-10-21 -Version Updated -The version number for this package has increased due to a version update of a related graphics package. +### Added +- Support for the PlayStation 5 platform has been added. +- Support for the XboxSeries platform has been added. + +### Fixed +- Fixed the default background color for previews to use the original color. +- Fixed a bug in FreeCamera which would only provide a speed boost for the first frame when pressing the Shift key. +- Fixed spacing between property fields on the Volume Component Editors. +- Fixed ALL/NONE to maintain the state on the Volume Component Editors. 
+- Fixed the selection of the Additional properties from ALL/NONE when the option "Show additional properties" is disabled +- Fixed ACES tonemapping for Nintendo Switch by forcing some shader color conversion functions to full float precision. +- Fixed missing warning UI about Projector component being unsupported (case 1300327). +- Fixed the display name of a Volume Parameter when the InspectorName attribute is defined +- Fixed crash on VolumeComponentWithQualityEditor when the current Pipeline is not HDRP ## [10.2.0] - 2020-10-19 diff --git a/Documentation~/TableOfContents.md b/Documentation~/TableOfContents.md index bbd91a4..c099611 100644 --- a/Documentation~/TableOfContents.md +++ b/Documentation~/TableOfContents.md @@ -12,7 +12,7 @@ * [RTHandle system](rthandle-system.md) * [RTHandle fundamentals](rthandle-system-fundamentals.md) * [Using the RTHandle system](rthandle-system-using.md) - +* [Custom Material Inspector](custom-material-inspector.md) * [Look Dev](Look-Dev) diff --git a/Documentation~/custom-material-inspector.md b/Documentation~/custom-material-inspector.md new file mode 100644 index 0000000..64d6a5b --- /dev/null +++ b/Documentation~/custom-material-inspector.md @@ -0,0 +1,50 @@ +# Custom Material Inspector + +Custom Material Inspectors enable you to define how Unity displays properties in the Material Inspector for a particular shader. This is useful if a shader includes a lot of properties and you want to organize them in the Inspector. The Universal Render Pipeline (URP) and High Definition Render Pipeline (HDRP) both support custom Material Inspectors, but the method to create them is slightly different. + +## Creating a custom Material Inspector + +The implementation for custom Material Inspectors differs between URP and HDRP. For example, for compatibility purposes, every custom Material Inspector in HDRP must inherit from `HDShaderGUI` which does not exist in URP. 
For information on how to create custom Material Inspectors for the respective render pipelines, see: + +- **HDRP**: [HDRP custom Material Inspectors](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest?subfolder=/manual/hdrp-custom-material-inspector.html). +- **URP**: [Unity Custom Shader GUI](https://docs.unity3d.com/Manual/SL-CustomShaderGUI.html). + +## Assigning a custom Material Inspector + +When you create a shader, either hand-written or using Shader Graph, both URP and HDRP provide a default editor for it to use. To override this default and provide your own custom Material Inspector, the method differs depending on whether you hand-wrote the shader or used Shader Graph. + +### Using hand-written shaders + +To set a custom Material Inspector for a hand-written shader: + +1. Open the shader source file. +2. Assign a string that contains the class name of the custom Material Inspector to the **CustomEditor** shader instruction. + +This is the same method as for the Built-in Renderer's [custom shader GUI](). + +For an example of how to do this, see the following shader code sample. In this example, the name of the custom Material Inspector class is **ExampleCustomMaterialInspector**: + +```c# +Shader "Custom/Example" +{ + Properties + { + // Shader properties + } + SubShader + { + // Shader code + } + CustomEditor "ExampleCustomMaterialInspector" +} +``` + + +### Using Shader Graph + +To set a custom Material Inspector for a Shader Graph shader: + +1. Open the Shader Graph. +2. In the [Graph Inspector](), open the Graph Settings tab. +3. If **Active Targets** does not include the render pipeline your project uses, click the **plus** button then, in the drop-down, click the render pipeline. +4. In the render pipeline section (**HDRP** or **URP** depending on the render pipeline your project uses) find the **Custom Editor GUI** property and provide it the name of the custom Material Inspector. 
diff --git a/Documentation~/render-graph-benefits.md b/Documentation~/render-graph-benefits.md index 7fd4cd2..2802c6f 100644 --- a/Documentation~/render-graph-benefits.md +++ b/Documentation~/render-graph-benefits.md @@ -2,7 +2,7 @@ ## Efficient memory management -When you manage resource allocation manually, you have to account for when every rendering feature is active at the same time and thus allocate for the worst-case scenario. When particular rendering features are not active, the resources to process them are there, but the render pipeline just does not use them. Contrarily, a render graph only allocates resources that the frame actually uses. This reduces the memory footprint of the render pipeline and means that there is no need to create complicated logic to handle resource allocation. Another benefit of efficient memory management is that, since a render graph can reuse resources efficiently, there are more resources available to create features for your render pipeline. +When you manage resource allocation manually, you have to account for scenarios when every rendering feature is active at the same time and thus allocate for the worst-case scenario. When particular rendering features are not active, the resources to process them are there, but the render pipeline does not use them. A render graph only allocates resources that the frame actually uses. This reduces the memory footprint of the render pipeline and means that there is no need to create complicated logic to handle resource allocation. Another benefit of efficient memory management is that, because a render graph can reuse resources efficiently, there are more resources available to create features for your render pipeline. ## Automatic synchronization point generation @@ -10,4 +10,4 @@ Asynchronous compute queues can run in parallel to the regular graphic workload ## Maintainability -One of the most complex issues when it comes to render pipeline maintenance is the management of resources. 
Since a render graph manages resources internally, it makes it much easier to maintain your render pipeline. Using the RenderGraph API, you can write efficient self-contained rendering modules that declare their input and output explicitly and are able to plug in anywhere in a render pipeline. +One of the most complex issues in render pipeline maintenance is the management of resources. Because a render graph manages resources internally, it makes it much easier to maintain your render pipeline. Using the RenderGraph API, you can write efficient self-contained rendering modules that declare their input and output explicitly and are able to plug in anywhere in a render pipeline. diff --git a/Documentation~/render-graph-fundamentals.md b/Documentation~/render-graph-fundamentals.md index 6aede0d..3cd759b 100644 --- a/Documentation~/render-graph-fundamentals.md +++ b/Documentation~/render-graph-fundamentals.md @@ -1,37 +1,37 @@ # Render graph fundamentals -This document describes the main principles behind a render graph and an overview of how Unity executes it. +This document describes the main principles of a render graph and an overview of how Unity executes it. ## Main principles -There are a few things to know before you can write render passes with the [RenderGraph](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) API. The following principles are the foundation of how a render graph works. +Before you can write render passes with the [RenderGraph](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) API, you need to know the following foundational principles: - You no longer handle resources directly and instead use render graph system-specific handles. All RenderGraph APIs use these handles to manipulate resources. 
The resource types a render graph manages are [RTHandles](rthandle-system.md), [ComputeBuffers](https://docs.unity3d.com/ScriptReference/ComputeBuffer.html), and [RendererLists](../api/UnityEngine.Experimental.Rendering.RendererList.html). - Actual resource references are only accessible within the execution code of a render pass. -- The framework requires an explicit declaration of render passes. Each render pass needs to state which resources it reads from and/or writes to. +- The framework requires an explicit declaration of render passes. Each render pass must state which resources it reads from and/or writes to. - There is no persistence between each execution of a render graph. This means that the resources you create inside one execution of the render graph cannot carry over to the next execution. -- For resources that need persistence (from one frame to another for example), you can create them outside of a render graph, like regular resources, and import them in. They behave as any other render graph resource in terms of dependency tracking, but the graph does not handle their lifetime. +- For resources that need persistence (from one frame to another for example), you can create them outside of a render graph, like regular resources, and import them in. They behave like any other render graph resource in terms of dependency tracking, but the graph does not handle their lifetime. - A render graph mostly uses `RTHandles` for texture resources. This has a number of implications on how to write shader code and how to set them up. ## Resource Management -The render graph system calculates the lifetime of each resource using the high-level representation of the whole frame. This means that when you create a resource via the RenderGraph API, the render graph system does not actually create the resource at that time. Instead, the API returns a handle that represents the resource that you then use with all RenderGraph APIs. 
The render graph only creates the resource just before the first pass that needs to write it. In this case, “creating” does not necessarily mean the render graph system allocates resources, but rather that it provides the necessary memory to represent the resource so that it can use the resource during a render pass. In the same manner, it also releases the resource memory after the last pass that needs to read it. This way, the render graph system can reuse memory in the most efficient manner based on what you declare in your passes. This also means that if the render graph system does not execute a pass that requires a specific resource, then the system does not allocate the memory for the resource. +The render graph system calculates the lifetime of each resource with the high-level representation of the whole frame. This means that when you create a resource via the RenderGraph API, the render graph system does not create the resource at that time. Instead, the API returns a handle that represents the resource, which you then use with all RenderGraph APIs. The render graph only creates the resource just before the first pass that needs to write it. In this case, “creating” does not necessarily mean that the render graph system allocates resources. Rather, it means that it provides the necessary memory to represent the resource so that it can use the resource during a render pass. In the same manner, it also releases the resource memory after the last pass that needs to read it. This way, the render graph system can reuse memory in the most efficient manner based on what you declare in your passes. If the render graph system does not execute a pass that requires a specific resource, then the system does not allocate the memory for the resource. ## Render graph execution overview -Render graph execution is a three-step process the render graph system completes, from scratch, every frame. 
This is because a graph can change dynamically from frame to frame, depending on the actions of the user for example. +Render graph execution is a three-step process that the render graph system completes, from scratch, every frame. This is because a graph can change dynamically from frame to frame, for example, depending on the actions of the user. ### Setup -The first step is to set up all the render passes. This is where you declare all the render passes to execute as well as the resources each render pass uses. +The first step is to set up all the render passes. This is where you declare all the render passes to execute and the resources each render pass uses. ### Compilation -The second step is to compile the graph. During this step, the render graph system culls render passes if no other render pass uses their outputs. This allows for more careless setups because you can reduce specific logic when you set up the graph. A good example of that is debug render passes. If you declare a render pass that produces a debug output that you don't present to the back buffer, the render graph system culls that pass automatically. +The second step is to compile the graph. During this step, the render graph system culls render passes if no other render pass uses their outputs. This allows for less organized setups because you can reduce specific logic when you set up the graph. A good example of that is debug render passes. If you declare a render pass that produces a debug output that you don't present to the back buffer, the render graph system culls that pass automatically. This step also calculates the lifetime of resources. This allows the render graph system to create and release resources in an efficient way as well as compute the proper synchronization points when it executes passes on the asynchronous compute pipeline. ### Execution -Finally, execute the graph. The render graph system executes all render passes, that it did not cull, in declaration order. 
Before each render pass, the render graph system creates the proper resources and releases them after the render pass if later render passes do not use them. +Finally, execute the graph. The render graph system executes all render passes that it did not cull, in declaration order. Before each render pass, the render graph system creates the proper resources and releases them after the render pass if later render passes do not use them. diff --git a/Documentation~/render-graph-system.md b/Documentation~/render-graph-system.md index c468f4d..25817e6 100644 --- a/Documentation~/render-graph-system.md +++ b/Documentation~/render-graph-system.md @@ -1,16 +1,16 @@ # The render graph system -The render graph system is a system that sits on top of Unity's Scriptable Render Pipeline (SRP). It allows you to author a custom SRP in a maintainable and modular way. Unity's High Definition Render Pipeline (HDRP) uses the render graph system. +The render graph system sits on top of Unity's Scriptable Render Pipeline (SRP). It allows you to author a custom SRP in a maintainable and modular way. Unity's High Definition Render Pipeline (HDRP) uses the render graph system. -You use the [RenderGraph](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) API to create a render graph. A render graph is a high-level representation of the custom SRP's render passes, which explicitly states how they use resources. +You use the [RenderGraph](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) API to create a render graph. A render graph is a high-level representation of the custom SRP's render passes, which explicitly states how the render passes use resources. -Describing render passes in this way has two benefits: it simplifies render pipeline configuration, and it allows the render graph system to efficiently manage various parts of the render pipeline, which can result in improved runtime performance. 
For more information on the benefits of the render graph system, see [benefits of the render graph system](render-graph-benefits.md). +Describing render passes in this way has two benefits: it simplifies render pipeline configuration, and it allows the render graph system to efficiently manage parts of the render pipeline, which can result in improved runtime performance. For more information on the benefits of the render graph system, see [benefits of the render graph system](render-graph-benefits.md). To use the render graph system, you need to write your code in a different way to a regular custom SRP. For more information on how to write code for the render graph system, see [writing a render pipeline](render-graph-writing-a-render-pipeline.md). For information on the technical principles behind the render graph system, see [render graph fundamentals](render-graph-fundamentals.md). -**Note**: Render graph is currently experimental which means Unity may change its API during future development. +**Note**: Render graph is currently experimental which means Unity might change its API during future development. This section contains the following pages: diff --git a/Documentation~/render-graph-writing-a-render-pipeline.md b/Documentation~/render-graph-writing-a-render-pipeline.md index 779b091..7857d87 100644 --- a/Documentation~/render-graph-writing-a-render-pipeline.md +++ b/Documentation~/render-graph-writing-a-render-pipeline.md @@ -2,7 +2,7 @@ This page covers the process of how to use the RenderGraph API to write a render pipeline. For information about the RenderGraph API, see [render graph system](render-graph-system.md) and [render graph fundamentals](render-graph-fundamentals.md). -### Initialization and clean-up of Render Graph +### Initialization and cleanup of Render Graph To begin, your render pipeline needs to maintain at least one instance of [RenderGraph](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html). 
This is the main entry point for the API. You can use more than one instance of a render graph, but be aware that Unity does not share resources across `RenderGraph` instances so for optimal memory usage, only use one instance. @@ -26,13 +26,11 @@ public class MyRenderPipeline : RenderPipeline } ``` -To initialize a `RenderGraph` instance, call the constructor with an optional name to identify the render graph. This also registers a render graph specific panel in the SRP Debug Window which is useful to debug the RenderGraph instance. When you dispose of a render pipeline, make sure to also call the `Cleanup()` method on the RenderGraph instance to properly free all the resources the render graph allocated. +To initialize a `RenderGraph` instance, call the constructor with an optional name to identify the render graph. This also registers a render graph-specific panel in the SRP Debug window which is useful for debugging the RenderGraph instance. When you dispose of a render pipeline, call the `Cleanup()` method on the RenderGraph instance to properly free all the resources the render graph allocated. ### Starting a render graph -Before you add any render pass to the render graph, you first need to initialize the render graph. - -To do this, call the `Begin` method. For details about this method's parameters, see the [API documentation](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) +Before you add any render passes to the render graph, you first need to initialize the render graph. To do this, call the `Begin` method. For details about this method's parameters, see the [API documentation](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html) ```c# var renderGraphParams = new RenderGraphExecuteParams() @@ -47,12 +45,12 @@ m_RenderGraph.Begin(renderGraphParams); ### Creating resources for the render graph -When you use a render graph, you never directly allocate resources yourself. 
Instead, the RenderGraph instance handles the allocation and disposal of all of its own resources. To declare resources and use them in a render pass, you use render graph specific APIs that return handles to the resource. +When you use a render graph, you never directly allocate resources yourself. Instead, the RenderGraph instance handles the allocation and disposal of its own resources. To declare resources and use them in a render pass, you use render graph specific APIs that return handles to the resource. There are two main types of resources that a render graph uses: -- **Internal resources**: These resources are internal to a render graph execution and you can not access them outside of the RenderGraph instance. You also cannot pass these resources from one execution of a graph to another. The render graph completely handles the lifetime of these resources. -- **Imported resources**: These usually come from outside of the render graph execution. Typical examples are the back buffer (provided by the camera) or buffers that you want the graph to use across multiple frames for temporal effects (like using the camera color buffer for temporal anti-aliasing). You are responsible for handling the lifetime of these resources. +- **Internal resources**: These resources are internal to a render graph execution and you cannot access them outside of the RenderGraph instance. You also cannot pass these resources from one execution of a graph to another. The render graph handles the lifetime of these resources. +- **Imported resources**: These usually come from outside the render graph execution. Typical examples are the back buffer (provided by the camera) or buffers that you want the graph to use across multiple frames for temporal effects (like using the camera color buffer for temporal anti-aliasing). You are responsible for handling the lifetime of these resources. 
After you create or import a resource, the render graph system represents it as a resource type-specific handle (`TextureHandle`, `ComputeBufferHandle`, or `RendererListHandle`). This way, the render graph can use internal and imported resources in the same way in all of its APIs. @@ -66,42 +64,37 @@ public TextureHandle RenderGraph.ImportBackbuffer(RenderTargetIdentifier rt); public ComputeBufferHandle RenderGraph.ImportComputeBuffer(ComputeBuffer computeBuffer); ``` -These are the main ways to create resources. There are additional variations of these functions, for the complete list, see the [API documentation](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html). Note the specific function to use to import the camera back buffer is `RenderTargetIdentifier`. +The main ways to create resources are described above, but there are variations of these functions. For the complete list, see the [API documentation](../api/UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph.html). Note that the specific function to use to import the camera back buffer is `RenderTargetIdentifier`. To create resources, each API requires a descriptor structure as a parameter. The properties in these structures are similar to the properties in the resources they represent (respectively [RTHandle](rthandle-system.md), [ComputeBuffer](https://docs.unity3d.com/ScriptReference/ComputeBuffer.html), and [RendererLists](../api/UnityEngine.Experimental.Rendering.RendererList.html)). However, some properties are specific to render graph textures. Here are the most important ones: -- **clearBuffer**: This property tells the graph whether to clear the buffer when the graph creates it. This is how you should clear textures when using the render graph. This is important because a render graph pools resources which means any pass that creates a texture may get an already existing one with undefined content. 
+- **clearBuffer**: This property tells the graph whether to clear the buffer when the graph creates it. This is how you should clear textures when using the render graph. This is important because a render graph pools resources, which means any pass that creates a texture might get an already existing one with undefined content. - **clearColor**: This property stores the color to clear the buffer to, if applicable. - - There are also two notions specific to textures that a render graph exposes through the `TextureDesc` constructors: - **xrReady**: This boolean indicates to the graph whether this texture is for XR rendering. If true, the render graph creates the texture as an array for rendering into each XR eye. - **dynamicResolution**: This boolean indicates to the graph whether it needs to dynamically resize this texture when the application uses dynamic resolution. If false, the texture does not scale automatically. - You can create resources outside render passes, inside the setup code for a render pass, but not in the rendering code. -Creating a resource outside of all render passes can be useful for cases where the first pass uses a given resource that depends on logic in the code that may change regularly. In this case, you must create the resource before any of those passes. A good example is using the color buffer for either a deferred lighting pass or a forward lighting pass. Both of these passes write to the color buffer, but Unity only executes one of them depending on the current rendering path chosen for the camera. In this case, you would create the color buffer outside both passes and pass it to the correct one as a parameter. +Creating a resource outside of all render passes can be useful for cases where the first pass uses a given resource that depends on logic in the code that might change regularly. In this case, you must create the resource before any of those passes. 
A good example is using the color buffer for either a deferred lighting pass or a forward lighting pass. Both of these passes write to the color buffer, but Unity only executes one of them depending on the current rendering path chosen for the camera. In this case, you would create the color buffer outside both passes and pass it to the correct one as a parameter. Creating a resource inside a render pass is usually for resources the render pass produces itself. For example, a blur pass requires an already existing input texture, but creates the output itself and returns it at the end of the render pass. - - -Note that creating a resource like that does not actually allocate GPU memory every frame. Instead, it reuses pooled memory. In the context of the render graph, think of resource creation more in terms of data flow in the context of a render pass than actual allocation. If a render pass creates a whole new output then it means that it “creates” a new texture in the render graph. +Note that creating a resource like that does not allocate GPU memory every frame. Instead, the render graph system reuses pooled memory. In the context of the render graph, think of resource creation more in terms of data flow in the context of a render pass than actual allocation. If a render pass creates a whole new output then it “creates” a new texture in the render graph. ### Writing a render pass -Before Unity can execute the render graph, you need to declare all the render passes. You write a render pass in two parts: setup and rendering. +Before Unity can execute the render graph, you must declare all the render passes. You write a render pass in two parts: setup and rendering. #### Setup -During setup, you declare the render pass and all the data it needs to execute. The render graph represents data by a class specific to the render pass which contains all the relevant properties. These can be regular C# constructs (struct, PoDs, etc) as well as render graph resource handles. 
This data structure is accessible during the actual rendering code. +During setup, you declare the render pass and all the data it needs to execute. The render graph represents data by a class specific to the render pass that contains all the relevant properties. These can be regular C# constructs (struct, PoDs, etc) and render graph resource handles. This data structure is accessible during the actual rendering code. ```c# class MyRenderPassData @@ -112,9 +105,6 @@ class MyRenderPassData } ``` - - - After you define the pass data, you can then declare the render pass itself: ```c# @@ -131,19 +121,17 @@ using (var builder = renderGraph.AddRenderPass("My Render Pass } ``` - - You define the render pass in the `using` scope around the `AddRenderPass` function. At the end of the scope, the render graph adds the render pass to the internal structures of the render graph for later processing. The `builder` variable is an instance of `RenderGraphBuilder`. This is the entry point to build the information relating to the render pass. There are several important parts to this: -- **Declaring resource usage**: This is one of the most important aspects of the RenderGraph API. Here you explicitly declare whether the render pass needs read and/or write access to the resources. This allows the render graph to have an overall view of the whole rendering frame and thus determine the best use of GPU memory as well as synchronization points between various render passes. -- **Declaring the rendering function**: This is the function in which you actually call graphics commands. It receives the pass data you define for the render pass as a parameter as well as the render graph context. You set the rendering function for a render pass via `SetRenderFunc` and the function runs after the graph compiles. -- **Creating transient resources**: Transient, or internal, resources are resources you create for the duration of this render pass only. 
You create them on the builder rather than the render graph itself to reflect their lifetime. Creating transient resources uses the same parameters as the equivalent function in the RenderGraph APIs. This is particularly useful when a pass uses temporary buffers that should not be accessible outside of the pass. Outside the pass where you declare a transient resource, the handle to the resource becomes invalid and Unity throws errors if you try to use it. +- **Declaring resource usage**: This is one of the most important aspects of the RenderGraph API. Here you explicitly declare whether the render pass needs read and/or write access to the resources. This allows the render graph to have an overall view of the whole rendering frame and thus determine the best use of GPU memory and synchronization points between various render passes. +- **Declaring the rendering function**: This is the function in which you call graphics commands. It receives the pass data you define for the render pass as a parameter as well as the render graph context. You set the rendering function for a render pass via `SetRenderFunc` and the function runs after the graph compiles. +- **Creating transient resources**: Transient, or internal, resources are resources you create for the duration of this render pass only. You create them in the builder rather than the render graph itself to reflect their lifetime. Creating transient resources uses the same parameters as the equivalent function in the RenderGraph APIs. This is particularly useful when a pass uses temporary buffers that should not be accessible outside of the pass. Outside the pass where you declare a transient resource, the handle to the resource becomes invalid and Unity throws errors if you try to use it. -The `passData` variable is an instance of the type you provide when you declare the pass. This is where you set the data that the rendering code can access. 
Note that the render graph does not use the contents of `passData` right away, but later in the frame, after it registers all the passes and the render graph compiles and executes. This means that any reference the `passData` stores needs to be constant across the whole frame. Otherwise, if you change the content before the render pass executes, it does not contain the correct content during the render pass. For this reason, it is best practice to only store value types in the `passData` unless you are certain that a reference stays constant until the pass finishes execution. +The `passData` variable is an instance of the type you provide when you declare the pass. This is where you set the data that the rendering code can access. Note that the render graph does not use the contents of `passData` right away, but later in the frame, after it registers all the passes and the render graph compiles and executes. This means that any reference the `passData` stores must be constant across the whole frame. Otherwise, if you change the content before the render pass executes, it does not contain the correct content during the render pass. For this reason, it is best practice to only store value types in the `passData` unless you are certain that a reference stays constant until the pass finishes execution. For an overview of the `RenderGraphBuilder` APIs, see the below table. For more details, see the API documentation: @@ -164,34 +152,30 @@ For an overview of the `RenderGraphBuilder` APIs, see the below table. For more #### Rendering Code -After you complete the setup, you can declare the function to use for rendering via the `SetRenderFunc` method on the `RenderGraphBuilder*`*. The function you assign must use the following signature: +After you complete the setup, you can declare the function to use for rendering via the `SetRenderFunc` method on the `RenderGraphBuilder`. 
The function you assign must use the following signature: ```c# delegate void RenderFunc(PassData data, RenderGraphContext renderGraphContext) where PassData : class, new(); ``` -You can either pass a render function as a `static` function or a lambda. The benefit of using a lambda function is that it can bring better code clarity as the rendering code is right next to the setup code. - +You can either pass a render function as a `static` function or a lambda. The benefit of using a lambda function is that it can bring better code clarity because the rendering code is next to the setup code. - -Note that if you use a lambda, be very careful not to capture any parameters from the main scope of the function as that generates garbage, which Unity later locates and frees during garbage collection. If you use Visual Studio and hover over the arrow **=>**, it tells you if the lambda captures anything from the scope. One thing to avoid is accessing members or member functions as using either captures `this`. +Note that if you use a lambda, be very careful not to capture any parameters from the main scope of the function as that generates garbage, which Unity later locates and frees during garbage collection. If you use Visual Studio and hover over the arrow **=>**, it tells you if the lambda captures anything from the scope. Avoid accessing members or member functions because using either captures `this`. The render function takes two parameters: - `PassData data`: This data is of the type you pass in when you declare the render pass. This is where you can access the properties initialized during the setup phase and use them for the rendering code. -- `RenderGraphContext renderGraphContext`. This stores references to the `ScriptableRenderContext` and the `CommandBuffer` which provide utility functions and allow you to write rendering code. +- `RenderGraphContext renderGraphContext`. 
This stores references to the `ScriptableRenderContext` and the `CommandBuffer` that provide utility functions and allow you to write rendering code. ##### Accessing resources in the render pass -Inside the rendering function, you can access all the render graph resource handles stored inside the `passData`. The conversion to actual resources is automatic so, whenever a function needs an RTHandle, a ComputeBuffer, or a RendererList, you can just pass the handle and the render graph converts the handle to the actual resource implicitly. Note that doing such implicit conversion outside of a rendering function results in an exception. This exception occurs because, outside of rendering, the render graph may have not allocated those resources yet. +Inside the rendering function, you can access all the render graph resource handles stored inside the `passData`. The conversion to actual resources is automatic so, whenever a function needs an RTHandle, a ComputeBuffer, or a RendererList, you can pass the handle and the render graph converts the handle to the actual resource implicitly. Note that doing such implicit conversion outside of a rendering function results in an exception. This exception occurs because, outside of rendering, the render graph may have not allocated those resources yet. ##### Using the RenderGraphContext -The RenderGraphContext provides various functionality you need to write rendering code. The two most important are the `ScriptableRenderContext` and the `CommandBuffer` which you use to call all rendering commands. - +The RenderGraphContext provides various functionality you need to write rendering code. The two most important are the `ScriptableRenderContext` and the `CommandBuffer`, which you use to call all rendering commands. - -It also contains the `RenderGraphObjectPool`. This class helps you to manage temporary objects which you may need for rendering code. +The RenderGraphContext also contains the `RenderGraphObjectPool`. 
This class helps you to manage temporary objects that you might need for rendering code. ##### Get temp functions @@ -202,13 +186,11 @@ T[] GetTempArray(int size); MaterialPropertyBlock GetTempMaterialPropertyBlock(); ``` - - `GetTempArray` returns a temporary array of type `T` and size `size`. This can be useful to allocate temporary arrays for passing parameters to materials or creating a `RenderTargetIdentifier` array to create multiple render target setups without the need to manage the array’s lifetime yourself. -`GetTempMaterialPropertyBlock` returns a clean material property block that you can use to set up parameters for a Material. This is particularly important because more than one pass might use a material and each pass could use it with different parameters. Since the rendering code execution is deferred via command buffers, copying material property blocks into the command buffer is mandatory to preserve data integrity on execution. +`GetTempMaterialPropertyBlock` returns a clean material property block that you can use to set up parameters for a Material. This is particularly important because more than one pass might use a material and each pass could use it with different parameters. Because the rendering code execution is deferred via command buffers, copying material property blocks into the command buffer is mandatory to preserve data integrity on execution. -The render graph releases and pools all the resources these two functions return automatically after the pass execution. This saves you from having to manage them yourself and does not create garbage. +The render graph releases and pools all the resources these two functions return automatically after the pass execution. This means you don’t have to manage them yourself and does not create garbage. 
#### Example render pass @@ -253,7 +235,7 @@ TextureHandle MyRenderPass(RenderGraph renderGraph, TextureHandle inputTexture, ### Execution of the Render Graph -After you declare all the render passes, you then need to execute the render graph. To do this, you need to call the Execute method. +After you declare all the render passes, you then need to execute the render graph. To do this, call the Execute method. ```c# m_RenderGraph.Execute(); @@ -263,7 +245,6 @@ This triggers the process that compiles and executes the render graph. ### Ending the frame -Over the course of your application, the render graph needs to allocate various resources. It may use these resources for a time but then may not need them. For the graph to free up those resources, call the `EndFrame()` method once a frame. This deallocates any resources that the render graph has not used since the last frame. This also executes all internal processing the render graph requires at the end of the frame. - +Over the course of your application, the render graph needs to allocate various resources. It might use these resources for a time but then might not need them. For the graph to free up those resources, call the `EndFrame()` method once a frame. This deallocates any resources that the render graph has not used since the last frame. This also executes all internal processing the render graph requires at the end of the frame. -Note that you should only call this once per frame and after all the rendering is complete (for example, after the last camera renders). This is because different cameras may have different rendering paths and thus need different resources. Calling the purge after each camera could result in the render graph releasing resources too early even though they may be necessary for the next camera. +Note that you should only call this once per frame and after all the rendering is complete (for example, after the last camera renders). 
This is because different cameras might have different rendering paths and thus need different resources. Calling the purge after each camera could result in the render graph releasing resources too early even though they might be necessary for the next camera. diff --git a/Documentation~/rthandle-system-fundamentals.md b/Documentation~/rthandle-system-fundamentals.md index aac6477..06fc935 100644 --- a/Documentation~/rthandle-system-fundamentals.md +++ b/Documentation~/rthandle-system-fundamentals.md @@ -1,17 +1,17 @@ ## RTHandle system fundamentals -This document describes the main principles behind the RTHandle (RTHandle) system. +This document describes the main principles of the RTHandle (RTHandle) system. -The RTHandle system is an abstraction on top of Unity's [RenderTexture](https://docs.unity3d.com/ScriptReference/RenderTexture.html) API. Its main purpose is to make it trivial to reuse render textures across cameras that use various resolutions. There are a few things to know before you can use it. The following principles are the foundation of how the RTHandle system works: +The RTHandle system is an abstraction on top of Unity's [RenderTexture](https://docs.unity3d.com/ScriptReference/RenderTexture.html) API. It makes it trivial to reuse render textures across Cameras that use various resolutions. The following principles are the foundation of how the RTHandle system works: -- You no longer allocate render textures yourself with a fixed resolution. Instead, you declare a render texture using a scale related to the full screen at a given resolution. The RTHandle system allocates the texture only once for the whole render pipeline so that it can reuse it for different cameras. -- There is now the concept of **reference size**. This is the resolution the application uses for rendering. It is your responsibility to declare it before the render pipeline renders every camera at a particular resolution. 
For information on how to do this, see [Updating the RTHandle system](rthandle-system-using.md#updating-the-rthandle-system). -- Internally, the RTHandle system tracks the largest reference size you declare. It uses this as the actual size of render textures. The largest reference size is the **maximum size**. +- You no longer allocate render textures yourself with a fixed resolution. Instead, you declare a render texture using a scale related to the full screen at a given resolution. The RTHandle system allocates the texture only once for the whole render pipeline so that it can reuse it for different Cameras. +- There is now the concept of reference size. This is the resolution the application uses for rendering. It is your responsibility to declare it before the render pipeline renders every Camera at a particular resolution. For information on how to do this, see the [Updating the RTHandle system](#updating-the-rthandle-system) section. +- Internally, the RTHandle system tracks the largest reference size you declare. It uses this as the actual size of render textures. The largest reference size is the maximum size. - Every time you declare a new reference size for rendering, the RTHandle system checks if it is larger than the current recorded largest reference size. If it is, the RTHandle system reallocates all render textures internally to fit the new size and replaces the largest reference size with the new size. -An example of this process is as follows. When you allocate the main color buffer, it would use a scale of **1** because it is a full-screen texture. You want to render it at the resolution of the screen. A downscaled buffer for a quarter-resolution transparency pass would use a scale of **0.5** for both the x-axis and y-axis. Internally the RTHandle system allocates render textures using the largest reference size multiplied by the scale you declare for the render texture. 
After that, before each camera renders, you tell the system what the current reference size is. Based on that and the scaling factor for all textures, the RTHandle system determines if it needs to reallocate render textures. As mentioned above, if the new reference size is larger than the current largest reference size, the RTHandle system reallocates all render textures. By doing this, the RTHanle system ends up with a stable maximum resolution for all render textures, which is most likely the resolution of your main camera. +An example of this process is as follows. When you allocate the main color buffer, it uses a scale of **1** because it is a full-screen texture. You want to render it at the resolution of the screen. A downscaled buffer for a quarter-resolution transparency pass would use a scale of **0.5** for both the x-axis and y-axis. Internally the RTHandle system allocates render textures using the largest reference size multiplied by the scale you declare for the render texture. After that and before each Camera renders, you tell the system what the current reference size is. Based on that and the scaling factor for all textures, the RTHandle system determines if it needs to reallocate render textures. As mentioned above, if the new reference size is larger than the current largest reference size, the RTHandle system reallocates all render textures. By doing this, the RTHandle system ends up with a stable maximum resolution for all render textures, which is most likely the resolution of your main Camera. -The key takeaway of this is that the actual resolution of the render textures is not necessarily the same as the current viewport: It can be bigger. This has implications when writing a renderer using RTHandles which the [Using the RTHandle system](rthandle-system-using.md) documentation explains. +The key takeaway of this is that the actual resolution of the render textures is not necessarily the same as the current viewport: it can be bigger. 
This has implications when you write a renderer using RTHandles, which the [Using the RTHandle system](rthandle-system-using.md) documentation explains. -Note that the RTHandleSystem also allows you to allocate textures with a fixed size. In this case, the RTHandle system never reallocates the texture. This allows you to use the RTHandle API consistently for both automatically-resized textures the RTHandle system manages and regular fixed size textures that you manage. +The RTHandleSystem also allows you to allocate textures with a fixed size. In this case, the RTHandle system never reallocates the texture. This allows you to use the RTHandle API consistently for both automatically-resized textures that the RTHandle system manages and regular fixed size textures that you manage. diff --git a/Documentation~/rthandle-system-using.md b/Documentation~/rthandle-system-using.md index c6ed0bf..e08b66d 100644 --- a/Documentation~/rthandle-system-using.md +++ b/Documentation~/rthandle-system-using.md @@ -4,24 +4,19 @@ This page covers how to use the RTHandle system to manage render textures in you ### Initializing the RTHandle System -All operations related to `RTHandles` require an instance of the `RTHandleSystem` class. This class contains all the APIs necessary to allocate and release RTHandles as well as setting the **Reference Size** for the frame. This means that you need to create and maintain an instance of `RTHandleSystem` in your render pipeline or make use of the static RTHandles class mentioned later in this section. To create your own instance of `RTHandleSystem`, see the following code sample: +All operations related to `RTHandles` require an instance of the `RTHandleSystem` class. This class contains all the APIs necessary to allocate RTHandles, release RTHandles, and set the reference size for the frame. 
This means that you must create and maintain an instance of `RTHandleSystem` in your render pipeline or make use of the static RTHandles class mentioned later in this section. To create your own instance of `RTHandleSystem`, see the following code sample: ```c# RTHandleSystem m_RTHandleSystem = new RTHandleSystem(); m_RTHandleSystem.Initialize(Screen.width, Screen.height, scaledRTsupportsMSAA: true, scaledRTMSAASamples: MSAASamples.MSAA4x); ``` +When you initialize the system, you must supply the starting resolution. The above code example uses the width and height of the screen. Because the RTHandle system only reallocates render textures when a Camera requires a resolution larger than the current maximum size, the internal `RTHandle` resolution can only increase from the value you pass in here. It is good practice to initialize this resolution to be the resolution of the main display. This means the system does not need to unnecessarily reallocate the render textures (and cause unwanted memory spikes) at the beginning of the application. -When you initialize the system, you need to supply the starting resolution. The above code example uses the width and height of the screen. Since the RTHandle system only reallocates render textures when a camera requires a resolution larger than the current maximum size, the internal `RTHandle` resolution can only increase from the value you pass in here. It is good practice to initialize this resolution to be the resolution of the main display. This means the system does not need to unnecessarily reallocate the render textures (and cause unwanted memory spikes) at the beginning of the application. +If you want to use multi-sample anti-aliasing (MSAA), you must declare the MSAA sample mode during initialization. In the example code above, the RTHandle system supports MSAA and uses the MSAA4x mode. The RTHandle system allocates all textures with the same number of samples. 
You can change the sample mode later, but this changes the sample mode for all automatically resized textures. +You must only call the `Initialize` function once at the beginning of the application. After this, you can use the initialized instance to allocate textures. -If you want to use multi-sample anti-aliasing (MSAA), you need to declare the MSAA sample mode during initialization. In the example code above, the RTHandle system supports MSAA and uses the MSAA4x mode. The RTHandle system allocates all textures with the same number of samples. You can change the sample mode later, but note that this changes the sample mode for all automatically resized textures. - - -Note that you must only call the `Initialize` function once at the beginning of the application. - -After this, you can use the initialized instance to allocate textures. - -Since you allocate the majority of `RTHandles` from the same `RTHandleSystem` instance, the RTHandle system also provides a default global instance through the `RTHandles` static class. Rather than maintain your own instance of `RTHandleSystem`, this allows you to use the same API as you get with an instance, but not worry about the lifetime of the instance. Using the static instance, initialization becomes this: +Because you allocate the majority of `RTHandles` from the same `RTHandleSystem` instance, the RTHandle system also provides a default global instance through the `RTHandles` static class. Rather than maintain your own instance of `RTHandleSystem`, this allows you to use the same API that you get with an instance, but not worry about the lifetime of the instance. Using the static instance, the initialization becomes this: ```c# RTHandles.Initialize(Screen.width, Screen.height, scaledRTsupportsMSAA: true, scaledRTMSAASamples: MSAASamples.MSAA4x); @@ -31,7 +26,7 @@ The code examples in the rest of this page use the default global instance. 
### Updating the RTHandle System -Before rendering with a camera, you need to set what resolution the RTHandle system uses as a reference size. To do so, call the `SetReferenceSize` function. +Before rendering with a Camera, you need to set the resolution the RTHandle system uses as a reference size. To do so, call the `SetReferenceSize` function. ```c# RTHandles.SetReferenceSize(width, hight, msaaSamples); @@ -48,11 +43,11 @@ After you initialize an instance of `RTHandleSystem`, whether this is your own i There are three main ways to allocate an `RTHandle`. They all use the same `Alloc` method on the RTHandleSystem instance. Most of the parameters of these functions are the same as the regular Unity RenderTexture ones, so for more information see the [RenderTexture API documentation](https://docs.unity3d.com/ScriptReference/RenderTexture.html). This section focuses on the parameters that relate to the size of the `RTHandle`: -- **Vector2 scaleFactor**: This variant requires a constant 2D scale for width and height. The RTHandle system uses this to calculate the resolution of the texture against the maximum reference size. For example, a scale of (1.0f, 1.0f) generates a full-screen texture. A scale of (0.5f 0.5f) generates a quarter-resolution texture. -- **ScaleFunc scaleFunc**: For cases when you don't want to use a constant scale to calculate the size of an `RTHandle`, you can provide a functor that calculates the size of the texture. The functor should take a Vector2Int as a parameter, which is the maximum reference size, and return a Vector2Int, which represents the size you want the texture to be. -- **Int width, int height**: This is for fixed-size textures. If you allocate a texture like this, it behaves like any regular RenderTexture. +- `Vector2 scaleFactor`: This variant requires a constant 2D scale for width and height. The RTHandle system uses this to calculate the resolution of the texture against the maximum reference size. 
For example, a scale of (1.0f, 1.0f) generates a full-screen texture. A scale of (0.5f 0.5f) generates a quarter-resolution texture. +- `ScaleFunc scaleFunc`: For cases when you don't want to use a constant scale to calculate the size of an `RTHandle`, you can provide a functor that calculates the size of the texture. The functor should take a `Vector2Int` as a parameter, which is the maximum reference size, and return a `Vector2In`t, which represents the size you want the texture to be. +- `int width, int height`: This is for fixed-size textures. If you allocate a texture like this, it behaves like any regular RenderTexture. -There are also overrides that create RTHandles from [RenderTargetIdentifier](https://docs.unity3d.com/ScriptReference/Rendering.RenderTargetIdentifier.html)*,* [RenderTextures](https://docs.unity3d.com/ScriptReference/RenderTexture.html), or [Textures](https://docs.unity3d.com/Manual/Textures.html)*.* These are useful when you want to use the RTHandle API to interact with all your textures, even though the texture might not be an actual `RTHandle`*.* +There are also overrides that create RTHandles from [RenderTargetIdentifier](https://docs.unity3d.com/ScriptReference/Rendering.RenderTargetIdentifier.html). [RenderTextures](https://docs.unity3d.com/ScriptReference/RenderTexture.html), or [Textures](https://docs.unity3d.com/Manual/Textures.html). These are useful when you want to use the RTHandle API to interact with all your textures, even though the texture might not be an actual `RTHandle`. The following code sample contains example uses of the `Alloc` function: @@ -80,25 +75,22 @@ myRTHandle.Release(); ## Using RTHandles -After you allocate an RTHandle, you can use it exactly like a regular RenderTexture. There are implicit conversions to `RenderTargetIdentifier` as well as `RenderTexture` you can even use them with regular related Unity APIs. +After you allocate an RTHandle, you can use it exactly like a regular RenderTexture. 
There are implicit conversions to `RenderTargetIdentifier` and `RenderTexture`, which means you can use them with regular related Unity APIs. - -However, when you use the RTHandle system, the actual resolution of the `RTHandle` might be different from the current resolution. For example, if the main camera renders at 1920x1080 and a secondary camera renders at 512x512, all RTHandle resolutions are based on the 1920x1080 resolution, even when rendering at lower resolutions. Because of this, take care when you set an RTHandle up as a render target. There are a number of APIs available in the [CoreUtils](../api/UnityEngine.Rendering.CoreUtils.html) class to help you with this. For example: +However, when you use the RTHandle system, the actual resolution of the `RTHandle` might be different from the current resolution. For example, if the main Camera renders at 1920x1080 and a secondary Camera renders at 512x512, all RTHandle resolutions are based on the 1920x1080 resolution, even when rendering at lower resolutions. Because of this, take care when you set an RTHandle up as a render target. There are a number of APIs available in the [CoreUtils](../api/UnityEngine.Rendering.CoreUtils.html) class to help you with this. For example: ```c# public static void SetRenderTarget(CommandBuffer cmd, RTHandle buffer, ClearFlag clearFlag, Color clearColor, int miplevel = 0, CubemapFace cubemapFace = CubemapFace.Unknown, int depthSlice = -1) ``` - This function sets the `RTHandle` as the active render target but also sets up the viewport based on the scale of the `RTHandle` and the current reference size, not the maximum size. - -For example, when the reference size is 512x512, even if the maximum size is 1920x1080, a texture of scale (1.0f, 1.0f) uses the 512x512 size and therefore a 512x512 viewport. A (0.5f, 0.5f) scaled texture sets a viewport of 256x256 and so on. 
This means that, when using these helper functions, the RTHandle system generates the correct viewport based on the `RTHandle` parameters. +For example, when the reference size is 512x512, even if the maximum size is 1920x1080, a texture of scale (1.0f, 1.0f) uses the 512x512 size and therefore sets up a 512x512 viewport. A (0.5f, 0.5f) scaled texture sets up a viewport of 256x256 and so on. This means that, when using these helper functions, the RTHandle system generates the correct viewport based on the `RTHandle` parameters. This example is one of many different overrides for the `SetRenderTarget` function. For the full list of overrides, see the [documentation](../api/UnityEngine.Rendering.CoreUtils.html#UnityEngine_Rendering_CoreUtils_SetRenderTarget_CommandBuffer_RenderTargetIdentifier_RenderBufferLoadAction_RenderBufferStoreAction_RenderTargetIdentifier_RenderBufferLoadAction_RenderBufferStoreAction_UnityEngine_Rendering_ClearFlag_). ## Using RTHandles in shaders -Usually, when sampling from a full-screen render texture in a shader, UVs span the whole 0 to 1 range. This is no longer always the case with `RTHandles`. It is possible that the current rendering only occurs in a partial viewport. To take this into account, you need to apply a scale to UVs when you sample `RTHandles` that use a scale. All the information necessary to handle `RTHandles` specificity inside shaders is in the `RTHandeProperties` structure that the `RTHandleSystem` instance provides. To access it, use: +When you sample from a full-screen render texture in a shader in the usual way, UVs span the whole 0 to 1 range. This is not always the case with `RTHandles`. The current rendering might only occur in a partial viewport. To take this into account, you must apply a scale to UVs when you sample `RTHandles` that use a scale. All the information necessary to handle `RTHandles` specificity inside shaders is in the `RTHandeProperties` structure that the `RTHandleSystem` instance provides. 
To access it, use: ```c# RTHandleProperties rtHandleProperties = RTHandles.rtHandleProperties; @@ -117,7 +109,7 @@ public struct RTHandleProperties } ``` -It provides: +This structure provides: - The current viewport size. This is the reference size you set for rendering. - The current render target size. This is the actual size of the render texture based on the maximum reference size. @@ -131,19 +123,19 @@ float2 scaledUVs = fullScreenUVs * rtHandleScale.xy; However, because the partial viewport always starts at (0, 0), when you use integer pixel coordinates within the viewport to load content from a texture, there is no need to rescale them. -Another important thing to consider is that, when rendering a full-screen quad into a partial viewport, there is no benefit from standard UV addressing mechanisms such as wrap or clamp. This is because the texture might be bigger than the viewport. For this reason, take care when sampling pixels outside of the viewport. +Another important thing to consider is that, when you render a full-screen quad into a partial viewport, there is no benefit from standard UV addressing mechanisms such as wrap or clamp. This is because the texture might be bigger than the viewport. For this reason, take care when you sample pixels outside of the viewport. ### Custom SRP specific information -There are no shader constants provided by default with SRP. So, when you use RTHandles with your own SRP, you need to provide these constants to their shaders themselves. +There are no shader constants provided by default with SRP. So, when you use RTHandles with your own SRP, you must provide these constants to their shaders themselves. ## Camera specific RTHandles -Most of the render textures that a rendering loop uses can be shared by all cameras. As long as their content does not need to carry from one frame to another, this is fine. However, some render textures need persistence. 
A good example of this is using the main color buffer in subsequent frames for Temporal Anti-aliasing. This means that the camera cannot share its RTHandle with other cameras. Most of the time, this also means that these RTHandles need to be at least double-buffered (written to in the current frame, read from the previous frame). To address this problem, the RTHandle system includes `BufferedRTHandleSystems`. +Most of the render textures that a rendering loop uses can be shared by all Cameras. If their content does not need to carry from one frame to another, this is fine. However, some render textures need persistence. A good example of this is using the main color buffer in subsequent frames for Temporal Anti-aliasing. This means that the Camera cannot share its RTHandle with other Cameras. Most of the time, this also means that these RTHandles must be at least double-buffered (written to during the current frame, read from during the previous frame). To address this problem, the RTHandle system includes `BufferedRTHandleSystems`. -A `BufferedRTHandleSystem` is an `RTHandleSystem` that can multi-buffer RTHandles. The idea is to identify a buffer by a unique id and provide APIs to allocate a number of instances of the same buffer and retrieve them from previous frames. These are **history buffers**. Usually, you need to allocate one `BufferedRTHandleSystem` for each camera. Each one owns their camera-specific RTHandles. +A `BufferedRTHandleSystem` is an `RTHandleSystem` that can multi-buffer RTHandles. The principle is to identify a buffer by a unique ID and provide APIs to allocate a number of instances of the same buffer then retrieve them from previous frames. These are history buffers. Usually, you must allocate one `BufferedRTHandleSystem` for each Camera. Each one owns their Camera-specific RTHandles. -One of the other consequences is that these history buffers don’t need to be allocated for every camera. 
For example, if a camera does not need Temporal Anti-aliasing, you can save the memory for it. Another consequence is that the system only allocates history buffers at the resolution of the camera. If the main camera is 1920x1080 and another camera renders in 256x256 and needs a history color buffer, this one only uses a 256x256 buffer and not a 1920*1080 buffer as the non-camera specific RTHandles would. To create an instance of a `BufferedRTHandleSystem`, see the following code sample: +Not every Camera needs history buffers. For example, if a Camera does not need Temporal Anti-aliasing, you do not need to assign a `BufferedRTHandleSystem` to it. History buffers require memory which means you can save memory by not assigning history buffers to Cameras that do not need them. Another consequence is that the system only allocates history buffers at the resolution of the Camera that the buffers are for. If the main Camera is 1920x1080 and another Camera renders in 256x256 and needs a history color buffer, the second Camera only uses a 256x256 buffer and not a 1920x1080 buffer as the non-Camera specific RTHandles would. To create an instance of a `BufferedRTHandleSystem`, see the following code sample: ```c# BufferedRTHandleSystem m_HistoryRTSystem = new BufferedRTHandleSystem(); @@ -155,9 +147,9 @@ To allocate an `RTHandle` using a `BufferedRTHandleSystem`, the process is diffe public void AllocBuffer(int bufferId, Func allocator, int bufferCount); ``` -The `bufferId` is a unique id the system uses to identify the buffer. The allocator is a function you provide to actually allocate the `RTHandles` when needed (all instances are not allocated upfront), and the `bufferCount` is the number of instances requested. +The `bufferId` is a unique ID that the system uses to identify the buffer. The allocator is a function you provide to allocate the `RTHandles` when needed (all instances are not allocated upfront), and the `bufferCount` is the number of instances requested. 
-From there, you can retrieve each `RTHandle` by its id and instance index like so: +From there, you can retrieve each `RTHandle` by its ID and instance index like so: ```c# public RTHandle GetFrameRT(int bufferId, int frameIndex); @@ -165,25 +157,25 @@ public RTHandle GetFrameRT(int bufferId, int frameIndex); The frame index is between zero and the number of buffers minus one. Zero always represents the current frame buffer, one the previous frame buffer, two the one before that, and so on. -To release a buffered RTHandle, call the `Release` function on the `BufferedRTHandleSystem`, passing in the id of the buffer to release: +To release a buffered RTHandle, call the `Release` function on the `BufferedRTHandleSystem`, passing in the ID of the buffer to release: ```c# public void ReleaseBuffer(int bufferId); ``` -In the same way that you provide the reference size for regular `RTHandleSystems`, you need to do the same for each instance of `BufferedRTHandleSystem`*.* +In the same way that you provide the reference size for regular `RTHandleSystems`, you must do this for each instance of `BufferedRTHandleSystem`. ```c# public void SwapAndSetReferenceSize(int width, int height, MSAASamples msaaSamples); ``` -This works the same way as regular RTHandleSystem but it also swaps the buffers internally so that the 0 index for `GetFrameRT` still references the current frame buffer. This slightly different way of handling camera-specific buffers also has implications when writing shader code. +This works the same way as regular RTHandleSystem but it also swaps the buffers internally so that the 0 index for `GetFrameRT` still references the current frame buffer. This slightly different way of handling Camera-specific buffers also has implications when you write shader code. 
-With a multi-buffered approach like this, it means that `RTHandles` from a previous frame may have a different size to the one from the current frame (for example, this can happen with dynamic resolution or even just when resizing the window in the editor). This means that when you access a buffered `RTHandle` from a previous frame, you need to scale it accordingly. The scale used to do this is in `RTHandleProperties.rtHandleScale.zw`. It is used exactly the same way as `xy` for regular RTHandles. This is also the reason why `RTHandleProperties` contains the viewport and resolution of the previous frame. It can be useful when doing computation with history buffers. +With a multi-buffered approach like this, `RTHandles` from a previous frame might have a different size to the one from the current frame. For example, this can happen with dynamic resolution or even when you resize the window in the Editor. This means that when you access a buffered `RTHandle` from a previous frame, you must scale it accordingly. The scale Unity uses to do this is in `RTHandleProperties.rtHandleScale.zw`. Unity uses this in exactly the same way as `xy` for regular RTHandles. This is also the reason why `RTHandleProperties` contains the viewport and resolution of the previous frame. It can be useful when doing computation with history buffers. ## Dynamic Resolution -One of the byproducts of the RTHandle System design is that it can also be used to simulate software dynamic resolution. Since the current resolution of the camera is not directly correlated to the actual render texture objects, you can provide any resolution you want at the beginning of the frame and all render textures scale accordingly. +One of the byproducts of the RTHandle System design is that you can also use it to simulate software dynamic resolution. 
Because the current resolution of the Camera is not directly correlated to the actual render texture objects, you can provide any resolution you want at the beginning of the frame and all render textures scale accordingly. ## Reset Reference Size @@ -193,4 +185,4 @@ Sometimes, you might need to render to a higher resolution than normal for a sho RTHandles.ResetReferenceSize(newWidth, newHeight); ``` -This forces the RTHandle system to reallocate all RTHandles to the new provided size. This is the only way to shrink down the size of `RTHandles`. +This forces the RTHandle system to reallocate all RTHandles to the new provided size. This is the only way to shrink the size of `RTHandles`. diff --git a/Documentation~/rthandle-system.md b/Documentation~/rthandle-system.md index a4ea3b2..a97b91d 100644 --- a/Documentation~/rthandle-system.md +++ b/Documentation~/rthandle-system.md @@ -2,10 +2,9 @@ Render target management is an important part of any render pipeline. In a complicated render pipeline where there are many interdependent render passes that use many different render textures, it is important to have a maintainable and extendable system that allows for easy memory management. +One of the biggest issues occurs when a render pipeline uses many different Cameras, each with their own resolution. For example, off-screen Cameras or real-time reflection probes. In this scenario, if the system allocated render textures independently for each Camera, the total amount of memory would increase to unmanageable levels. This is particularly bad for complex render pipelines that use many intermediate render textures. Unity can use [temporary render textures](https://docs.unity3d.com/ScriptReference/RenderTexture.GetTemporary.html), but unfortunately, they do not suit this kind of use case because temporary render textures can only reuse memory if a new render texture uses the exact same properties and resolution. 
This means that when rendering with two different resolutions, the total amount of memory Unity uses is the sum of all resolutions. -One of the biggest issues occurs when a render pipeline uses many different cameras, each with their own resolution. For example, off-screen cameras or real-time reflection probes. In this scenario, if the system allocated render textures independently for each camera, the total amount of memory would increase to unmanageable levels. This is particularly bad for complex render pipelines that use many intermediate render textures. Unity can use [temporary render textures](https://docs.unity3d.com/ScriptReference/RenderTexture.GetTemporary.html), but unfortunately, they do not suit this kind of use case because temporary render textures can only reuse memory if a new render texture uses the exact same properties and resolution. This means that when rendering with two different resolutions, the total amount of memory Unity uses is the sum of all resolutions. - -To solve these issues with render texture memory allocation, Unity's Scriptable Render Pipeline includes the RTHandle (RTHandle) system. This system is an abstraction layer over Unity's [RenderTexture](https://docs.unity3d.com/ScriptReference/RenderTexture.html) API that handles render texture management automatically. +To solve these issues with render texture memory allocation, Unity's Scriptable Render Pipeline includes the RTHandle system. This system is an abstraction layer on top of Unity's [RenderTexture](https://docs.unity3d.com/ScriptReference/RenderTexture.html) API that handles render texture management automatically. 
This section contains the following pages: diff --git a/Editor/CoreEditorUtils.cs b/Editor/CoreEditorUtils.cs index cf99aff..91ccbff 100644 --- a/Editor/CoreEditorUtils.cs +++ b/Editor/CoreEditorUtils.cs @@ -884,6 +884,27 @@ public static T[] GetAdditionalData(UnityEngine.Object[] targets, Action i return data; } + /// Add the appropriate AdditionalData to the given GameObject and its children containing the original component + /// The type of the original component + /// The type of the AdditionalData component + /// The root object to update + /// [Optional] The default value to use if there is no AdditionalData + public static void AddAdditionalData(GameObject go, Action initDefault = null) + where T : Component + where AdditionalT : Component + { + var components = go.GetComponentsInChildren(typeof(T), true); + foreach (var c in components) + { + if (!c.TryGetComponent(out _)) + { + var hd = c.gameObject.AddComponent(); + if (initDefault != null) + initDefault(hd); + } + } + } + /// Create a game object /// The parent /// The wanted name (can be updated with a number if a sibling with same name exist diff --git a/Editor/Gizmo/GizmoUtility.cs b/Editor/Gizmo/GizmoUtility.cs new file mode 100644 index 0000000..2e24de3 --- /dev/null +++ b/Editor/Gizmo/GizmoUtility.cs @@ -0,0 +1,26 @@ +using System; +using UnityEngine; + +namespace UnityEditor.Rendering +{ + public static class GizmoUtility + { + public static Color GetHandleColor(Color baseColor) + { + baseColor.a = 1f; + return baseColor; + } + + public static Color GetWireframeColor(Color baseColor) + { + baseColor.a = .7f; + return baseColor; + } + + public static Color GetWireframeColorBehindObjects(Color baseColor) + { + baseColor.a = .2f; + return baseColor; + } + } +} diff --git a/Editor/Gizmo/GizmoUtility.cs.meta b/Editor/Gizmo/GizmoUtility.cs.meta new file mode 100644 index 0000000..3fb6b73 --- /dev/null +++ b/Editor/Gizmo/GizmoUtility.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 
13d32c9cdf2984447b4802095ce716ae +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/Gizmo/HierarchicalBox.cs b/Editor/Gizmo/HierarchicalBox.cs index e0dbb33..d2d225d 100644 --- a/Editor/Gizmo/HierarchicalBox.cs +++ b/Editor/Gizmo/HierarchicalBox.cs @@ -115,17 +115,24 @@ public Color baseColor set { value.a = 8f / 255; - m_MonochromeFillColor = value; - material.color = m_MonochromeFillColor; - value.a = 1f; - m_MonochromeHandleColor = value; - value.a = 0.7f; - m_WireframeColor = value; - value.a = 0.2f; - m_WireframeColorBehind = value; + SetBaseColor(value); } } + /// + /// Set the baseColor used to fill hull. All other colors are deduced from it except specific handle colors. + /// Instead of baseColor set, this will not force the opacity and keep what is provided for the filled faces. + /// + /// The color to use + public void SetBaseColor(Color color) + { + m_MonochromeFillColor = color; + material.color = m_MonochromeFillColor; + m_MonochromeHandleColor = GizmoUtility.GetHandleColor(color); + m_WireframeColor = GizmoUtility.GetWireframeColor(color); + m_WireframeColorBehind = GizmoUtility.GetWireframeColorBehindObjects(color); + } + //Note: Handles.Slider not allow to use a specific ControlID. 
//Thus Slider1D is used (with reflection) static Type k_Slider1D = Type.GetType("UnityEditorInternal.Slider1D, UnityEditor"); diff --git a/Editor/LookDev/CameraController.cs b/Editor/LookDev/CameraController.cs index 26b67c5..a2271c8 100644 --- a/Editor/LookDev/CameraController.cs +++ b/Editor/LookDev/CameraController.cs @@ -129,13 +129,17 @@ virtual protected bool isDragging } } - public CameraController(CameraState cameraState, DisplayWindow window, Action focused) + public CameraController(DisplayWindow window, Action focused) { - m_CameraState = cameraState; m_Window = window; m_Focused = focused; } + public void UpdateCameraState(Context context, ViewIndex index) + { + m_CameraState = context.GetViewContent(index).camera; + } + private void ResetCameraControl() { isDragging = false; @@ -458,16 +462,22 @@ class SwitchableCameraController : CameraController bool switchedDrag = false; bool switchedWheel = false; - public SwitchableCameraController(CameraState cameraStateFirstView, CameraState cameraStateSecondView, DisplayWindow window, Action focused) - : base(cameraStateFirstView, window, null) + public SwitchableCameraController(DisplayWindow window, Action focused) + : base(window, null) { - m_FirstView = cameraStateFirstView; - m_SecondView = cameraStateSecondView; m_CurrentViewIndex = ViewIndex.First; m_Focused = () => focused?.Invoke(m_CurrentViewIndex); } + public void UpdateCameraState(Context context) + { + m_FirstView = context.GetViewContent(ViewIndex.First).camera; + m_SecondView = context.GetViewContent(ViewIndex.Second).camera; + + m_CameraState = m_CurrentViewIndex == ViewIndex.First ? 
m_FirstView : m_SecondView; + } + void SwitchTo(ViewIndex index) { CameraState stateToSwitch; diff --git a/Editor/LookDev/ComparisonGizmoController.cs b/Editor/LookDev/ComparisonGizmoController.cs index 645fea4..9955fd6 100644 --- a/Editor/LookDev/ComparisonGizmoController.cs +++ b/Editor/LookDev/ComparisonGizmoController.cs @@ -49,12 +49,16 @@ bool isDragging } } - public ComparisonGizmoController(ComparisonGizmoState state, SwitchableCameraController switcher) + public ComparisonGizmoController(SwitchableCameraController switcher) { - m_State = state; m_Switcher = switcher; } + public void UpdateGizmoState(ComparisonGizmoState state) + { + m_State = state; + } + protected override void RegisterCallbacksOnTarget() { target.RegisterCallback(OnMouseDown); diff --git a/Editor/LookDev/Compositor.cs b/Editor/LookDev/Compositor.cs index bc2ce08..15698d2 100644 --- a/Editor/LookDev/Compositor.cs +++ b/Editor/LookDev/Compositor.cs @@ -150,17 +150,15 @@ public bool pixelPerfect public Compositer( IViewDisplayer displayer, - Context contexts, IDataProvider dataProvider, StageCache stages) { m_Displayer = displayer; - m_Contexts = contexts; m_RenderDataCache = new RenderingData[2] { - new RenderingData() { stage = stages[ViewIndex.First], updater = contexts.GetViewContent(ViewIndex.First).camera }, - new RenderingData() { stage = stages[ViewIndex.Second], updater = contexts.GetViewContent(ViewIndex.Second).camera } + new RenderingData() { stage = stages[ViewIndex.First] }, + new RenderingData() { stage = stages[ViewIndex.Second] } }; m_Displayer.OnRenderDocAcquisitionTriggered += RenderDocAcquisitionRequested; @@ -197,6 +195,12 @@ public void Dispose() public void Render() { + // This can happen when entering/leaving playmode. + if (LookDev.dataProvider == null) + return; + + m_Contexts = LookDev.currentContext; + //TODO: make integration EditorWindow agnostic! 
if (UnityEditorInternal.RenderDoc.IsLoaded() && UnityEditorInternal.RenderDoc.IsSupported() && m_RenderDocAcquisitionRequested) UnityEditorInternal.RenderDoc.BeginCaptureRenderDoc(m_Displayer as EditorWindow); @@ -236,11 +240,13 @@ void AcquireDataForView(ViewIndex index, Rect viewport) m_RenderTextures.UpdateSize(renderingData.viewPort, index, m_Renderer.pixelPerfect, renderingData.stage.camera); - int debugMode = m_Contexts.GetViewContent(index).debug.viewMode; + int debugMode = view.debug.viewMode; if (debugMode != -1) LookDev.dataProvider.UpdateDebugMode(debugMode); renderingData.output = m_RenderTextures[index, ShadowCompositionPass.MainView]; + renderingData.updater = view.camera; + m_Renderer.BeginRendering(renderingData, LookDev.dataProvider); m_Renderer.Acquire(renderingData); diff --git a/Editor/LookDev/DisplayWindow.cs b/Editor/LookDev/DisplayWindow.cs index 3f482e0..ee8c367 100644 --- a/Editor/LookDev/DisplayWindow.cs +++ b/Editor/LookDev/DisplayWindow.cs @@ -217,6 +217,10 @@ SidePanel sidePanel StyleSheet styleSheet = null; StyleSheet styleSheetLight = null; + SwitchableCameraController m_FirstOrCompositeManipulator; + CameraController m_SecondManipulator; + ComparisonGizmoController m_GizmoManipulator; + void ReloadStyleSheets() { if (styleSheet == null || styleSheet.Equals(null)) @@ -395,9 +399,7 @@ void CreateViews() m_Views[(int)ViewIndex.Second] = new Image() { name = Style.k_SecondViewName, image = Texture2D.blackTexture }; m_ViewContainer.Add(m_Views[(int)ViewIndex.Second]); - var firstOrCompositeManipulator = new SwitchableCameraController( - LookDev.currentContext.GetViewContent(ViewIndex.First).camera, - LookDev.currentContext.GetViewContent(ViewIndex.Second).camera, + m_FirstOrCompositeManipulator = new SwitchableCameraController( this, index => { @@ -406,8 +408,7 @@ void CreateViews() if (sidePanel == SidePanel.Environment && environment != null && LookDev.currentContext.environmentLibrary != null) m_EnvironmentList.selectedIndex = 
LookDev.currentContext.environmentLibrary.IndexOf(environment); }); - var secondManipulator = new CameraController( - LookDev.currentContext.GetViewContent(ViewIndex.Second).camera, + m_SecondManipulator = new CameraController( this, () => { @@ -416,10 +417,10 @@ void CreateViews() if (sidePanel == SidePanel.Environment && environment != null && LookDev.currentContext.environmentLibrary != null) m_EnvironmentList.selectedIndex = LookDev.currentContext.environmentLibrary.IndexOf(environment); }); - var gizmoManipulator = new ComparisonGizmoController(LookDev.currentContext.layout.gizmoState, firstOrCompositeManipulator); - m_Views[(int)ViewIndex.First].AddManipulator(gizmoManipulator); //must take event first to switch the firstOrCompositeManipulator - m_Views[(int)ViewIndex.First].AddManipulator(firstOrCompositeManipulator); - m_Views[(int)ViewIndex.Second].AddManipulator(secondManipulator); + m_GizmoManipulator = new ComparisonGizmoController(m_FirstOrCompositeManipulator); + m_Views[(int)ViewIndex.First].AddManipulator(m_GizmoManipulator); //must take event first to switch the firstOrCompositeManipulator + m_Views[(int)ViewIndex.First].AddManipulator(m_FirstOrCompositeManipulator); + m_Views[(int)ViewIndex.Second].AddManipulator(m_SecondManipulator); m_NoObject1 = new Label(Style.k_DragAndDropObject); m_NoObject1.style.flexGrow = 1; @@ -667,6 +668,11 @@ void Update() Debug.LogError("LookDev is not supported: No SRP detected."); LookDev.Close(); } + + // All those states coming from the Contexts can become invalid after a domain reload so we need to update them. 
+ m_FirstOrCompositeManipulator.UpdateCameraState(LookDev.currentContext); + m_SecondManipulator.UpdateCameraState(LookDev.currentContext, ViewIndex.Second); + m_GizmoManipulator.UpdateGizmoState(LookDev.currentContext.layout.gizmoState); } void OnGUI() diff --git a/Editor/LookDev/EnvironmentLibrary.cs b/Editor/LookDev/EnvironmentLibrary.cs index 03edd2a..3e84f03 100644 --- a/Editor/LookDev/EnvironmentLibrary.cs +++ b/Editor/LookDev/EnvironmentLibrary.cs @@ -4,6 +4,7 @@ using System.Collections.Generic; using UnityEngine.UIElements; using System.IO; +using UnityEditor; using UnityEditor.UIElements; namespace UnityEditor.Rendering.LookDev @@ -117,14 +118,15 @@ public int IndexOf(Environment environment) [CustomEditor(typeof(EnvironmentLibrary))] class EnvironmentLibraryEditor : Editor { - VisualElement root; + VisualElement m_Root; + VisualElement m_OpenButton; public sealed override VisualElement CreateInspectorGUI() { var library = target as EnvironmentLibrary; - root = new VisualElement(); + m_Root = new VisualElement(); - Button open = new Button(() => + m_OpenButton = new Button(() => { if (!LookDev.open) LookDev.Open(); @@ -132,11 +134,22 @@ public sealed override VisualElement CreateInspectorGUI() LookDev.currentEnvironmentDisplayer.Repaint(); }) { - text = "Open in LookDev window" + text = "Open in Look Dev window" }; + m_OpenButton.SetEnabled(LookDev.supported); - root.Add(open); - return root; + m_Root.Add(m_OpenButton); + return m_Root; + } + + void OnEnable() => EditorApplication.update += Update; + void OnDisable() => EditorApplication.update -= Update; + + void Update() + { + // Current SRP can be changed at any time so we need to do this at every update. 
+ if (m_OpenButton != null) + m_OpenButton.SetEnabled(LookDev.supported); } // Don't use ImGUI diff --git a/Editor/LookDev/LookDev.cs b/Editor/LookDev/LookDev.cs index 0e7fb8c..589ea1e 100644 --- a/Editor/LookDev/LookDev.cs +++ b/Editor/LookDev/LookDev.cs @@ -28,7 +28,18 @@ internal static IDataProvider dataProvider internal static Context currentContext { //Lazy init: load it when needed instead in static even if you do not support lookdev - get => s_CurrentContext ?? (s_CurrentContext = LoadConfigInternal() ?? defaultContext); + get + { + if (s_CurrentContext == null || s_CurrentContext.Equals(null)) + { + s_CurrentContext = LoadConfigInternal(); + if (s_CurrentContext == null) + s_CurrentContext = defaultContext; + + ReloadStage(false); + } + return s_CurrentContext; + } private set => s_CurrentContext = value; } @@ -118,7 +129,10 @@ internal static void Initialize(DisplayWindow window) s_ViewDisplayer = window; s_EnvironmentDisplayer = window; open = true; - ConfigureLookDev(reloadWithTemporaryID: false); + + // Lookdev Initialize can be called when the window is re-created by the editor layout system. 
+ // In that case, the current context won't be null and there might be objects to reload from the temp ID + ConfigureLookDev(reloadWithTemporaryID: s_CurrentContext != null); } [Callbacks.DidReloadScripts] @@ -161,9 +175,9 @@ static void WaitingSRPReloadForConfiguringRenderer(int maxAttempt, bool reloadWi static void ConfigureRenderer(bool reloadWithTemporaryID) { s_Stages?.Dispose(); //clean previous occurrence on reloading - s_Stages = new StageCache(dataProvider, currentContext); + s_Stages = new StageCache(dataProvider); s_Compositor?.Dispose(); //clean previous occurrence on reloading - s_Compositor = new Compositer(s_ViewDisplayer, currentContext, dataProvider, s_Stages); + s_Compositor = new Compositer(s_ViewDisplayer, dataProvider, s_Stages); } static void LinkViewDisplayer() @@ -177,10 +191,6 @@ static void LinkViewDisplayer() s_ViewDisplayer = null; //currentContext = null; - //release editorInstanceIDs - currentContext.GetViewContent(ViewIndex.First).CleanTemporaryObjectIndexes(); - currentContext.GetViewContent(ViewIndex.Second).CleanTemporaryObjectIndexes(); - SaveConfig(); open = false; @@ -227,7 +237,12 @@ static void LinkViewDisplayer() static void LinkEnvironmentDisplayer() { - s_EnvironmentDisplayer.OnChangingEnvironmentLibrary += currentContext.UpdateEnvironmentLibrary; + s_EnvironmentDisplayer.OnChangingEnvironmentLibrary += UpdateEnvironmentLibrary; + } + + static void UpdateEnvironmentLibrary(EnvironmentLibrary library) + { + LookDev.currentContext.UpdateEnvironmentLibrary(library); } static void ReloadStage(bool reloadWithTemporaryID) diff --git a/Editor/LookDev/Stage.cs b/Editor/LookDev/Stage.cs index 08551be..d67d18b 100644 --- a/Editor/LookDev/Stage.cs +++ b/Editor/LookDev/Stage.cs @@ -284,7 +284,6 @@ class StageCache : IDisposable const string secondStageName = "LookDevSecondView"; Stage[] m_Stages; - Context m_Contexts; IDataProvider m_CurrentDataProvider; public Stage this[ViewIndex index] @@ -292,9 +291,8 @@ class StageCache : 
IDisposable public bool initialized { get; private set; } - public StageCache(IDataProvider dataProvider, Context contexts) + public StageCache(IDataProvider dataProvider) { - m_Contexts = contexts; m_Stages = new Stage[2] { InitStage(ViewIndex.First, dataProvider), @@ -333,7 +331,7 @@ public void UpdateSceneObjects(ViewIndex index) Stage stage = this[index]; stage.Clear(); - var viewContent = m_Contexts.GetViewContent(index); + var viewContent = LookDev.currentContext.GetViewContent(index); if (viewContent == null) { viewContent.viewedInstanceInPreview = null; @@ -347,7 +345,7 @@ public void UpdateSceneObjects(ViewIndex index) public void UpdateSceneLighting(ViewIndex index, IDataProvider provider) { Stage stage = this[index]; - Environment environment = m_Contexts.GetViewContent(index).environment; + Environment environment = LookDev.currentContext.GetViewContent(index).environment; provider.UpdateSky(stage.camera, environment == null ? default : environment.sky, stage.runtimeInterface); diff --git a/Editor/MaterialUpgrader.cs b/Editor/MaterialUpgrader.cs index e499e33..e31c0a9 100644 --- a/Editor/MaterialUpgrader.cs +++ b/Editor/MaterialUpgrader.cs @@ -331,7 +331,7 @@ public static void UpgradeProjectFolder(List upgraders, string /// Material Upgrader flags. public static void UpgradeProjectFolder(List upgraders, HashSet shaderNamesToIgnore, string progressBarName, UpgradeFlags flags = UpgradeFlags.None) { - if (!EditorUtility.DisplayDialog(DialogText.title, "The upgrade will overwrite materials in your project. " + DialogText.projectBackMessage, DialogText.proceed, DialogText.cancel)) + if ((!Application.isBatchMode) && (!EditorUtility.DisplayDialog(DialogText.title, "The upgrade will overwrite materials in your project. 
" + DialogText.projectBackMessage, DialogText.proceed, DialogText.cancel))) return; int totalMaterialCount = 0; diff --git a/Editor/ProjectorEditor.cs b/Editor/ProjectorEditor.cs new file mode 100644 index 0000000..c844c94 --- /dev/null +++ b/Editor/ProjectorEditor.cs @@ -0,0 +1,23 @@ +using UnityEngine; +using UnityEngine.Rendering; + +namespace UnityEditor.Rendering +{ + [CustomEditorForRenderPipeline(typeof(Projector), typeof(RenderPipelineAsset))] + [CanEditMultipleObjects] + class ProjectorEditor : Editor + { + static readonly GUIContent k_Message = EditorGUIUtility.TrTextContent("The active render pipeline does not support the Projector component. If using HDRP, use the Decal Projector component instead."); + + public override void OnInspectorGUI() + { + EditorGUILayout.HelpBox(k_Message.text, MessageType.Warning); + + using (new EditorGUI.DisabledScope(true)) + { + // Projector doesn't have a CustomEditor, so we can just draw the default inspector. + DrawDefaultInspector(); + } + } + } +} diff --git a/Editor/ProjectorEditor.cs.meta b/Editor/ProjectorEditor.cs.meta new file mode 100644 index 0000000..ace67aa --- /dev/null +++ b/Editor/ProjectorEditor.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: cb4bfc396f2f4be69c46d1055d00781f +timeCreated: 1611068571 \ No newline at end of file diff --git a/Editor/Volume/Drawers/IntParameterDrawer.cs b/Editor/Volume/Drawers/IntParameterDrawer.cs index e59ff39..67c2fc9 100644 --- a/Editor/Volume/Drawers/IntParameterDrawer.cs +++ b/Editor/Volume/Drawers/IntParameterDrawer.cs @@ -1,5 +1,6 @@ using UnityEngine; using UnityEngine.Rendering; +using UnityEditorInternal; namespace UnityEditor.Rendering { @@ -104,4 +105,19 @@ public override bool OnGUI(SerializedDataParameter parameter, GUIContent title) return true; } } + + [VolumeParameterDrawer(typeof(LayerMaskParameter))] + sealed class LayerMaskParameterDrawer : VolumeParameterDrawer + { + public override bool OnGUI(SerializedDataParameter parameter, GUIContent title) + 
{ + var value = parameter.value; + + if (value.propertyType != SerializedPropertyType.LayerMask) + return false; + + value.intValue = EditorGUILayout.MaskField(title, value.intValue, InternalEditorUtility.layers); + return true; + } + } } diff --git a/Editor/Volume/VolumeComponentEditor.cs b/Editor/Volume/VolumeComponentEditor.cs index 88673fe..12d4c41 100644 --- a/Editor/Volume/VolumeComponentEditor.cs +++ b/Editor/Volume/VolumeComponentEditor.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq; using System.Reflection; +using UnityEditor.AnimatedValues; using UnityEngine; using UnityEngine.Assertions; using UnityEngine.Rendering; @@ -75,6 +76,26 @@ public VolumeComponentEditorAttribute(Type componentType) /// public class VolumeComponentEditor { + class Styles + { + public static GUIContent overrideSettingText { get; } = EditorGUIUtility.TrTextContent("", "Override this setting for this volume."); + public static GUIContent allText { get; } = EditorGUIUtility.TrTextContent("ALL", "Toggle all overrides on. To maximize performances you should only toggle overrides that you actually need."); + public static GUIContent noneText { get; } = EditorGUIUtility.TrTextContent("NONE", "Toggle all overrides off."); + + public static string toggleAllText { get; } = L10n.Tr("Toggle All"); + } + + Vector2? m_OverrideToggleSize; + internal Vector2 overrideToggleSize + { + get + { + if (!m_OverrideToggleSize.HasValue) + m_OverrideToggleSize = CoreEditorStyles.smallTickbox.CalcSize(Styles.overrideSettingText); + return m_OverrideToggleSize.Value; + } + } + /// /// Specifies the this editor is drawing. /// @@ -99,10 +120,11 @@ public class VolumeComponentEditor SerializedProperty m_AdvancedMode; + List m_VolumeNotAdditionalParameters; /// /// Override this property if your editor makes use of the "More Options" feature. 
/// - public virtual bool hasAdvancedMode => false; + public virtual bool hasAdvancedMode => target.parameters.Count != m_VolumeNotAdditionalParameters.Count; /// /// Checks if the editor currently has the "More Options" feature toggled on. @@ -176,9 +198,18 @@ internal void Init(VolumeComponent target, Editor inspector) serializedObject = new SerializedObject(target); activeProperty = serializedObject.FindProperty("active"); m_AdvancedMode = serializedObject.FindProperty("m_AdvancedMode"); + + InitParameters(); + OnEnable(); } + void InitParameters() + { + m_VolumeNotAdditionalParameters = new List(); + VolumeComponent.FindParameters(target, m_VolumeNotAdditionalParameters, field => field.GetCustomAttribute() == null); + } + void GetFields(object o, List<(FieldInfo, SerializedProperty)> infos, SerializedProperty prop = null) { if (o == null) @@ -264,7 +295,7 @@ public virtual void OnInspectorGUI() // Display every field as-is foreach (var parameter in m_Parameters) { - if (parameter.displayName.text != "") + if (!string.IsNullOrEmpty(parameter.displayName.text)) PropertyField(parameter.param, parameter.displayName); else PropertyField(parameter.param); @@ -281,21 +312,68 @@ public virtual string GetDisplayTitle() return target.displayName == "" ? ObjectNames.NicifyVariableName(target.GetType().Name) : target.displayName; } + void AddToogleState(GUIContent content, bool state) + { + bool allOverridesSameState = AreOverridesTo(state); + if (GUILayout.Toggle(allOverridesSameState, content, CoreEditorStyles.miniLabelButton, GUILayout.ExpandWidth(false)) && !allOverridesSameState) + SetOverridesTo(state); + } + void TopRowFields() { using (new EditorGUILayout.HorizontalScope()) { - if (GUILayout.Button(EditorGUIUtility.TrTextContent("All", "Toggle all overrides on. 
To maximize performances you should only toggle overrides that you actually need."), CoreEditorStyles.miniLabelButton, GUILayout.Width(17f), GUILayout.ExpandWidth(false))) - SetAllOverridesTo(true); + AddToogleState(Styles.allText, true); + AddToogleState(Styles.noneText, false); + } + } + + /// + /// Checks if all the visible parameters have the given state + /// + /// The state to check + internal bool AreOverridesTo(bool state) + { + if (hasAdvancedMode && isInAdvancedMode) + return AreAllOverridesTo(state); + + for (int i = 0; i < m_VolumeNotAdditionalParameters.Count; ++i) + { + if (m_VolumeNotAdditionalParameters[i].overrideState != state) + return false; + } + return true; + } + + /// + /// Sets the given state to all the visible parameters + /// + /// The state to check + internal void SetOverridesTo(bool state) + { + if (hasAdvancedMode && isInAdvancedMode) + SetAllOverridesTo(state); + else + { + Undo.RecordObject(target, Styles.toggleAllText); + target.SetOverridesTo(m_VolumeNotAdditionalParameters, state); + serializedObject.Update(); + } + } - if (GUILayout.Button(EditorGUIUtility.TrTextContent("None", "Toggle all overrides off."), CoreEditorStyles.miniLabelButton, GUILayout.Width(32f), GUILayout.ExpandWidth(false))) - SetAllOverridesTo(false); + internal bool AreAllOverridesTo(bool state) + { + for (int i = 0; i < target.parameters.Count; ++i) + { + if (target.parameters[i].overrideState != state) + return false; } + return true; } internal void SetAllOverridesTo(bool state) { - Undo.RecordObject(target, "Toggle All"); + Undo.RecordObject(target, Styles.toggleAllText); target.SetAllOverridesTo(state); serializedObject.Update(); } @@ -324,36 +402,50 @@ protected void PropertyField(SerializedDataParameter property) } /// - /// Draws a given in the editor using a custom label - /// and tooltip. + /// Handles unity built-in decorators (Space, Header, Tooltips, ...) from attributes /// - /// The property to draw in the editor. 
- /// A custom label and/or tooltip. - protected void PropertyField(SerializedDataParameter property, GUIContent title) + /// The property to obtain the attributes and handle the decorators + /// A custom label and/or tooltip that might be updated by and/or by + void HandleDecorators(SerializedDataParameter property, GUIContent title) { - // Handle unity built-in decorators (Space, Header, Tooltip etc) foreach (var attr in property.attributes) { - if (attr is PropertyAttribute) + if (!(attr is PropertyAttribute)) + continue; + + switch (attr) { - if (attr is SpaceAttribute) + case SpaceAttribute spaceAttribute: + EditorGUILayout.GetControlRect(false, spaceAttribute.height); + break; + case HeaderAttribute headerAttribute: { - EditorGUILayout.GetControlRect(false, (attr as SpaceAttribute).height); + var rect = EditorGUI.IndentedRect(EditorGUILayout.GetControlRect(false, EditorGUIUtility.singleLineHeight)); + EditorGUI.LabelField(rect, headerAttribute.header, EditorStyles.miniLabel); + break; } - else if (attr is HeaderAttribute) - { - var rect = EditorGUILayout.GetControlRect(false, EditorGUIUtility.singleLineHeight); - rect.y += 0f; - rect = EditorGUI.IndentedRect(rect); - EditorGUI.LabelField(rect, (attr as HeaderAttribute).header, EditorStyles.miniLabel); - } - else if (attr is TooltipAttribute) + case TooltipAttribute tooltipAttribute: { if (string.IsNullOrEmpty(title.tooltip)) - title.tooltip = (attr as TooltipAttribute).tooltip; + title.tooltip = tooltipAttribute.tooltip; + break; } + case InspectorNameAttribute inspectorNameAttribute: + title.text = inspectorNameAttribute.displayName; + break; } } + } + + /// + /// Draws a given in the editor using a custom label + /// and tooltip. + /// + /// The property to draw in the editor. + /// A custom label and/or tooltip. 
+ protected void PropertyField(SerializedDataParameter property, GUIContent title) + { + HandleDecorators(property, title); // Custom parameter drawer VolumeParameterDrawer drawer; @@ -423,9 +515,15 @@ protected void PropertyField(SerializedDataParameter property, GUIContent title) /// The property to draw the override checkbox for protected void DrawOverrideCheckbox(SerializedDataParameter property) { - var overrideRect = GUILayoutUtility.GetRect(17f, 17f, GUILayout.ExpandWidth(false)); - overrideRect.yMin += 4f; - property.overrideState.boolValue = GUI.Toggle(overrideRect, property.overrideState.boolValue, EditorGUIUtility.TrTextContent("", "Override this setting for this volume."), CoreEditorStyles.smallTickbox); + // Create a rect the height + vspacing of the property that is being overridden + float height = EditorGUI.GetPropertyHeight(property.value) + EditorGUIUtility.standardVerticalSpacing; + var overrideRect = GUILayoutUtility.GetRect(Styles.allText, CoreEditorStyles.miniLabelButton, GUILayout.Height(height), GUILayout.ExpandWidth(false)); + + // also center vertically the checkbox + overrideRect.yMin += height * 0.5f - overrideToggleSize.y * 0.5f; + overrideRect.xMin += overrideToggleSize.x * 0.5f; + + property.overrideState.boolValue = GUI.Toggle(overrideRect, property.overrideState.boolValue, Styles.overrideSettingText, CoreEditorStyles.smallTickbox); + } + } } diff --git a/Editor/Volume/VolumeComponentListEditor.cs b/Editor/Volume/VolumeComponentListEditor.cs index d7bbea0..fd5f695 100644 --- a/Editor/Volume/VolumeComponentListEditor.cs +++ b/Editor/Volume/VolumeComponentListEditor.cs @@ -342,17 +342,17 @@ internal void AddComponent(Type type) var componentProp = m_ComponentsProperty.GetArrayElementAtIndex(m_ComponentsProperty.arraySize - 1); componentProp.objectReferenceValue = component; + // Create & store the internal editor object for this effect + CreateEditor(component, componentProp, forceOpen: true); + + 
m_SerializedObject.ApplyModifiedProperties(); + // Force save / refresh if (EditorUtility.IsPersistent(asset)) { EditorUtility.SetDirty(asset); AssetDatabase.SaveAssets(); } - - // Create & store the internal editor object for this effect - CreateEditor(component, componentProp, forceOpen: true); - - m_SerializedObject.ApplyModifiedProperties(); } internal void RemoveComponent(int id) diff --git a/Runtime/Camera/FreeCamera.cs b/Runtime/Camera/FreeCamera.cs index 293aa38..59dd13d 100644 --- a/Runtime/Camera/FreeCamera.cs +++ b/Runtime/Camera/FreeCamera.cs @@ -151,7 +151,7 @@ void UpdateInputs() inputRotateAxisX += (Input.GetAxis(kRightStickX) * m_LookSpeedController * Time.deltaTime); inputRotateAxisY += (Input.GetAxis(kRightStickY) * m_LookSpeedController * Time.deltaTime); - leftShift = Input.GetKeyDown(KeyCode.LeftShift); + leftShift = Input.GetKey(KeyCode.LeftShift); fire1 = Input.GetAxis("Fire1") > 0.0f; inputChangeSpeed = Input.GetAxis(kSpeedAxis); @@ -192,10 +192,8 @@ void Update() transform.localRotation = Quaternion.Euler(newRotationX, newRotationY, transform.localEulerAngles.z); float moveSpeed = Time.deltaTime * m_MoveSpeed; - if (leftShiftBoost) - moveSpeed *= leftShift ? m_Turbo : 1.0f; - else - moveSpeed *= fire1 ? m_Turbo : 1.0f; + if (fire1 || leftShiftBoost && leftShift) + moveSpeed *= m_Turbo; transform.position += transform.forward * moveSpeed * inputVertical; transform.position += transform.right * moveSpeed * inputHorizontal; transform.position += Vector3.up * moveSpeed * inputYAxis; diff --git a/Runtime/Common/CoreAttributes.cs b/Runtime/Common/CoreAttributes.cs index 29f5777..c06a309 100644 --- a/Runtime/Common/CoreAttributes.cs +++ b/Runtime/Common/CoreAttributes.cs @@ -13,4 +13,12 @@ public class DisplayInfoAttribute : Attribute /// Display order used in UI. 
public int order; } + + /// + /// Attribute used to customize UI display to allow properties to be visible only when "Show Additional Properties" is selected + /// + [AttributeUsage(AttributeTargets.Field)] + public class AdditionalPropertyAttribute : Attribute + { + } } diff --git a/Runtime/RenderGraph/RenderGraph.cs b/Runtime/RenderGraph/RenderGraph.cs index 6537bc9..4058303 100644 --- a/Runtime/RenderGraph/RenderGraph.cs +++ b/Runtime/RenderGraph/RenderGraph.cs @@ -556,6 +556,8 @@ public void Begin(in RenderGraphParameters parameters) m_ImmediateModeResourceList[i].Clear(); } + + m_Resources.BeginExecute(m_CurrentFrameIndex); } } diff --git a/Runtime/RenderGraph/RenderGraphResourceComputeBuffer.cs b/Runtime/RenderGraph/RenderGraphResourceComputeBuffer.cs index 6455ba5..15c167c 100644 --- a/Runtime/RenderGraph/RenderGraphResourceComputeBuffer.cs +++ b/Runtime/RenderGraph/RenderGraphResourceComputeBuffer.cs @@ -203,7 +203,7 @@ override public void PurgeUnusedResources(int currentFrameIndex) var list = kvp.Value; list.RemoveAll(obj => { - if (obj.frameIndex < s_CurrentFrameIndex) + if (ShouldReleaseResource(obj.frameIndex, s_CurrentFrameIndex)) { obj.resource.Release(); return true; diff --git a/Runtime/RenderGraph/RenderGraphResourcePool.cs b/Runtime/RenderGraph/RenderGraphResourcePool.cs index bfc3f65..cf34b33 100644 --- a/Runtime/RenderGraph/RenderGraphResourcePool.cs +++ b/Runtime/RenderGraph/RenderGraphResourcePool.cs @@ -21,6 +21,7 @@ abstract class RenderGraphResourcePool : IRenderGraphResourcePool where Ty List<(int, Type)> m_FrameAllocatedResources = new List<(int, Type)>(); protected static int s_CurrentFrameIndex; + const int kStaleResourceLifetime = 10; // Release the GPU resource itself protected abstract void ReleaseInternalResource(Type res); @@ -120,8 +121,23 @@ public override void LogResources(RenderGraphLogger logger) allocationList.Sort((a, b) => a.size < b.size ? 
1 : -1); int index = 0; + float total = 0; foreach (var element in allocationList) - logger.LogLine("[{0}]\t[{1:#.##} MB]\t{2}", index++, element.size / 1024.0f, element.name); + { + float size = element.size / (1024.0f * 1024.0f); + total += size; + logger.LogLine($"[{index++:D2}]\t[{size:0.00} MB]\t{element.name}"); + } + + logger.LogLine($"\nTotal Size [{total:0.00}]"); + } + + static protected bool ShouldReleaseResource(int lastUsedFrameIndex, int currentFrameIndex) + { + // We need to have a delay of a few frames before releasing resources for good. + // Indeed, when having multiple off-screen cameras, they are rendered in a separate SRP render call and thus with a different frame index than main camera + // This causes texture to be deallocated/reallocated every frame if the two cameras don't need the same buffers. + return (lastUsedFrameIndex + kStaleResourceLifetime) < currentFrameIndex; } } } diff --git a/Runtime/RenderGraph/RenderGraphResourceTexture.cs b/Runtime/RenderGraph/RenderGraphResourceTexture.cs index dd2056e..768763c 100644 --- a/Runtime/RenderGraph/RenderGraphResourceTexture.cs +++ b/Runtime/RenderGraph/RenderGraphResourceTexture.cs @@ -406,7 +406,7 @@ override public void PurgeUnusedResources(int currentFrameIndex) var list = kvp.Value; list.RemoveAll(obj => { - if (obj.frameIndex < s_CurrentFrameIndex) + if (ShouldReleaseResource(obj.frameIndex, s_CurrentFrameIndex)) { obj.resource.Release(); return true; diff --git a/Runtime/Textures/RTHandleSystem.cs b/Runtime/Textures/RTHandleSystem.cs index 3daec6b..4ef9424 100644 --- a/Runtime/Textures/RTHandleSystem.cs +++ b/Runtime/Textures/RTHandleSystem.cs @@ -320,11 +320,13 @@ void DemandResize(RTHandle rth) rt.width, rt.height, rt.volumeDepth, - rt.format, + rt.graphicsFormat, + rt.dimension, rth.m_Name, mips: rt.useMipMap, enableMSAA: rth.m_EnableMSAA, - msaaSamples: m_ScaledRTCurrentMSAASamples + msaaSamples: m_ScaledRTCurrentMSAASamples, + dynamicRes: rt.useDynamicScale ); // Create the new 
texture @@ -412,7 +414,7 @@ void Resize(int width, int height, MSAASamples msaaSamples, bool sizeChanged, bo } // Regenerate the name - renderTexture.name = CoreUtils.GetRenderTargetAutoName(renderTexture.width, renderTexture.height, renderTexture.volumeDepth, renderTexture.format, rth.m_Name, mips: renderTexture.useMipMap, enableMSAA: rth.m_EnableMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples); + renderTexture.name = CoreUtils.GetRenderTargetAutoName(renderTexture.width, renderTexture.height, renderTexture.volumeDepth, renderTexture.graphicsFormat, renderTexture.dimension, rth.m_Name, mips: renderTexture.useMipMap, enableMSAA: rth.m_EnableMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples, dynamicRes: renderTexture.useDynamicScale); // Create the render texture renderTexture.Create(); @@ -513,7 +515,7 @@ void Resize(int width, int height, MSAASamples msaaSamples, bool sizeChanged, bo bindTextureMS = bindTextureMS, useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, memorylessMode = memoryless, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples) + name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale) }; } @@ -769,7 +771,7 @@ string name useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, memorylessMode = memoryless, stencilFormat = stencilFormat, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, name, mips: useMipMap, enableMSAA: allocForMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples) + name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: allocForMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples, dynamicRes: useDynamicScale) }; } else @@ -790,7 +792,7 @@ string name bindTextureMS = bindTextureMS, 
useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, memorylessMode = memoryless, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, name, mips: useMipMap, enableMSAA: allocForMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples) + name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: allocForMSAA, msaaSamples: m_ScaledRTCurrentMSAASamples, dynamicRes: useDynamicScale) }; } diff --git a/Runtime/Textures/TextureXR.cs b/Runtime/Textures/TextureXR.cs index afe525c..72bab51 100644 --- a/Runtime/Textures/TextureXR.cs +++ b/Runtime/Textures/TextureXR.cs @@ -39,6 +39,7 @@ public static bool useTexArray case GraphicsDeviceType.Direct3D11: case GraphicsDeviceType.Direct3D12: case GraphicsDeviceType.PlayStation4: + case GraphicsDeviceType.PlayStation5: case GraphicsDeviceType.Vulkan: return true; diff --git a/Runtime/Utilities/CameraCaptureBridge.cs b/Runtime/Utilities/CameraCaptureBridge.cs index 2e8018a..162b9d4 100644 --- a/Runtime/Utilities/CameraCaptureBridge.cs +++ b/Runtime/Utilities/CameraCaptureBridge.cs @@ -1,18 +1,5 @@ -#if UNITY_EDITOR -#define USE_REFLECTION -#endif - using System; using System.Collections.Generic; -#if UNITY_EDITOR -#if USE_REFLECTION -using System.Reflection; - -#else -using UnityEditor.Recorder; -#endif -#endif - namespace UnityEngine.Rendering { @@ -21,49 +8,11 @@ namespace UnityEngine.Rendering /// public static class CameraCaptureBridge { -#if USE_REFLECTION - private static FieldInfo m_Enabled; - private static MethodInfo m_GetActions; -#endif - private static Dictionary>> actionDict = new Dictionary>>(); private static bool _enabled; - static CameraCaptureBridge() - { -#if USE_REFLECTION - const string optionsClassName = "UnityEditor.Recorder.Options"; - const string editorDllName = "Unity.Recorder.Editor"; - var optionsType = Type.GetType(optionsClassName + ", " + editorDllName); - if (optionsType == null) - return; - - const 
string useCameraCaptureCallbacksFieldName = "useCameraCaptureCallbacks"; - var useCameraCaptureCallbacksField = optionsType.GetField( - useCameraCaptureCallbacksFieldName, - BindingFlags.Public | BindingFlags.Static); - if (useCameraCaptureCallbacksField == null) - return; - - const string captureClassName = "UnityEditor.Recorder.Input.CameraCapture"; - var captureType = Type.GetType(captureClassName + ", " + editorDllName); - if (captureType == null) - return; - - const string getActionsMethodName = "GetActions"; - var getActionsMethod = captureType.GetMethod( - getActionsMethodName, - BindingFlags.Public | BindingFlags.Static); - if (getActionsMethod == null) - return; - - m_Enabled = useCameraCaptureCallbacksField; - m_GetActions = getActionsMethod; -#endif - } - /// /// Enable camera capture. /// @@ -71,23 +20,10 @@ public static bool enabled { get { - return -#if USE_REFLECTION - m_Enabled == null ? _enabled : (bool)m_Enabled.GetValue(null) -#elif UNITY_EDITOR - UnityEditor.Recorder.Options.useCameraCaptureCallbacks -#else - _enabled -#endif - ; + return _enabled; } set { -#if USE_REFLECTION - m_Enabled?.SetValue(null, value); -#elif UNITY_EDITOR - UnityEditor.Recorder.Options.useCameraCaptureCallbacks = value; -#endif _enabled = value; } } @@ -99,19 +35,6 @@ public static bool enabled /// Enumeration of actions public static IEnumerator> GetCaptureActions(Camera camera) { -#if USE_REFLECTION - if (m_GetActions != null) - { - var recorderActions = (m_GetActions.Invoke(null, new object[] { camera }) as IEnumerator>); - if (recorderActions != null) - return recorderActions; - } -#elif UNITY_EDITOR - var recorderActions = UnityEditor.Recorder.Input.CameraCapture.GetActions(camera); - if (recorderActions != null) - return recorderActions; -#endif - if (!actionDict.TryGetValue(camera, out var actions)) return null; diff --git a/Runtime/Utilities/CoreRenderPipelinePreferences.cs b/Runtime/Utilities/CoreRenderPipelinePreferences.cs index 5226960..3f8d2a8 100644 --- 
a/Runtime/Utilities/CoreRenderPipelinePreferences.cs +++ b/Runtime/Utilities/CoreRenderPipelinePreferences.cs @@ -10,12 +10,19 @@ namespace UnityEngine.Rendering #if UNITY_EDITOR using UnityEditor; - + class Styles + { + public static readonly GUIContent userDefaults = EditorGUIUtility.TrTextContent("Use Defaults"); + } public static class CoreRenderPipelinePreferences { static bool m_Loaded = false; - static Color s_VolumeGizmoColor = new Color(0.2f, 0.8f, 0.1f, 0.5f); + // Added default Colors so that they can be reverted back to these values + static Color s_VolumeGizmoColorDefault = new Color(0.2f, 0.8f, 0.1f, 0.5f); + static Color s_VolumeGizmoColor = s_VolumeGizmoColorDefault; + static readonly Color kPreviewCameraBackgroundColorDefault = new Color(82f / 255.0f, 82f / 255.0f, 82.0f / 255.0f, 0.0f); + public static Color volumeGizmoColor { get => s_VolumeGizmoColor; @@ -27,6 +34,8 @@ public static Color volumeGizmoColor } } + public static Color previewBackgroundColor => kPreviewCameraBackgroundColorDefault; + static class Keys { internal const string volumeGizmoColor = "CoreRP.Volume.GizmoColor"; @@ -35,18 +44,31 @@ static class Keys [SettingsProvider] static SettingsProvider PreferenceGUI() { - return new SettingsProvider("Preferences/Core Render Pipeline", SettingsScope.User) + return new SettingsProvider("Preferences/Colors/SRP", SettingsScope.User) { guiHandler = searchContext => { if (!m_Loaded) Load(); - volumeGizmoColor = EditorGUILayout.ColorField("Volume Gizmo Color", volumeGizmoColor); + Rect r = EditorGUILayout.GetControlRect(); + r.xMin = 10; + EditorGUIUtility.labelWidth = 251; + volumeGizmoColor = EditorGUI.ColorField(r, "Volume Gizmo Color", volumeGizmoColor); + + if (GUILayout.Button(Styles.userDefaults, GUILayout.Width(120))) + { + RevertColors(); + } } }; } + static void RevertColors() + { + volumeGizmoColor = s_VolumeGizmoColorDefault; + } + static CoreRenderPipelinePreferences() { Load(); @@ -55,7 +77,6 @@ static 
CoreRenderPipelinePreferences() static void Load() { s_VolumeGizmoColor = GetColor(Keys.volumeGizmoColor, new Color(0.2f, 0.8f, 0.1f, 0.5f)); - m_Loaded = true; } diff --git a/Runtime/Utilities/CoreUtils.cs b/Runtime/Utilities/CoreUtils.cs index 4287f1d..1eccbba 100644 --- a/Runtime/Utilities/CoreUtils.cs +++ b/Runtime/Utilities/CoreUtils.cs @@ -579,7 +579,7 @@ public static void SetViewport(CommandBuffer cmd, RTHandle target) /// Number of MSAA samples. /// Generated names bassed on the provided parameters. public static string GetRenderTargetAutoName(int width, int height, int depth, RenderTextureFormat format, string name, bool mips = false, bool enableMSAA = false, MSAASamples msaaSamples = MSAASamples.None) - => GetRenderTargetAutoName(width, height, depth, format.ToString(), name, mips, enableMSAA, msaaSamples); + => GetRenderTargetAutoName(width, height, depth, format.ToString(), TextureDimension.None, name, mips, enableMSAA, msaaSamples, dynamicRes: false); /// /// Generate a name based on render texture parameters. @@ -594,9 +594,26 @@ public static string GetRenderTargetAutoName(int width, int height, int depth, R /// Number of MSAA samples. /// Generated names bassed on the provided parameters. public static string GetRenderTargetAutoName(int width, int height, int depth, GraphicsFormat format, string name, bool mips = false, bool enableMSAA = false, MSAASamples msaaSamples = MSAASamples.None) - => GetRenderTargetAutoName(width, height, depth, format.ToString(), name, mips, enableMSAA, msaaSamples); + => GetRenderTargetAutoName(width, height, depth, format.ToString(), TextureDimension.None, name, mips, enableMSAA, msaaSamples, dynamicRes: false); - static string GetRenderTargetAutoName(int width, int height, int depth, string format, string name, bool mips = false, bool enableMSAA = false, MSAASamples msaaSamples = MSAASamples.None) + /// + /// Generate a name based on render texture parameters. + /// + /// With of the texture. 
+ /// Height of the texture. + /// Depth of the texture. + /// Graphics format of the render texture. + /// Dimension of the texture. + /// Base name of the texture. + /// True if the texture has mip maps. + /// True if the texture is multisampled. + /// Number of MSAA samples. + /// True if the texture uses dynamic resolution. + /// Generated names bassed on the provided parameters. + public static string GetRenderTargetAutoName(int width, int height, int depth, GraphicsFormat format, TextureDimension dim, string name, bool mips = false, bool enableMSAA = false, MSAASamples msaaSamples = MSAASamples.None, bool dynamicRes = false) + => GetRenderTargetAutoName(width, height, depth, format.ToString(), dim, name, mips, enableMSAA, msaaSamples, dynamicRes); + + static string GetRenderTargetAutoName(int width, int height, int depth, string format, TextureDimension dim, string name, bool mips, bool enableMSAA, MSAASamples msaaSamples, bool dynamicRes) { string result = string.Format("{0}_{1}x{2}", name, width, height); @@ -608,9 +625,15 @@ static string GetRenderTargetAutoName(int width, int height, int depth, string f result = string.Format("{0}_{1}", result, format); + if (dim != TextureDimension.None) + result = string.Format("{0}_{1}", result, dim); + if (enableMSAA) result = string.Format("{0}_{1}", result, msaaSamples.ToString()); + if (dynamicRes) + result = string.Format("{0}_{1}", result, "dynamic"); + return result; } diff --git a/Runtime/Volume/VolumeComponent.cs b/Runtime/Volume/VolumeComponent.cs index 499f299..ad134a1 100644 --- a/Runtime/Volume/VolumeComponent.cs +++ b/Runtime/Volume/VolumeComponent.cs @@ -81,7 +81,10 @@ public class VolumeComponent : ScriptableObject /// /// Extracts all the s defined in this class and nested classes. /// - static void GetParameters(object o, List parameters) + /// The object to find the parameters + /// The list filled with the parameters. 
+ /// If you want to filter the parameters + internal static void FindParameters(object o, List parameters, Func filter = null) { if (o == null) return; @@ -93,9 +96,12 @@ static void GetParameters(object o, List parameters) foreach (var field in fields) { if (field.FieldType.IsSubclassOf(typeof(VolumeParameter))) - parameters.Add((VolumeParameter)field.GetValue(o)); + { + if (filter?.Invoke(field) ?? true) + parameters.Add((VolumeParameter)field.GetValue(o)); + } else if (!field.FieldType.IsArray && field.FieldType.IsClass) - GetParameters(field.GetValue(o), parameters); + FindParameters(field.GetValue(o), parameters, filter); } } @@ -109,7 +115,7 @@ protected virtual void OnEnable() { // Automatically grab all fields of type VolumeParameter for this instance var fields = new List(); - GetParameters(this, fields); + FindParameters(this, fields); parameters = fields.AsReadOnly(); foreach (var parameter in parameters) @@ -195,10 +201,14 @@ public virtual void Override(VolumeComponent state, float interpFactor) /// The value to set the state of the overrides to. public void SetAllOverridesTo(bool state) { - SetAllOverridesTo(parameters, state); + SetOverridesTo(parameters, state); } - void SetAllOverridesTo(IEnumerable enumerable, bool state) + /// + /// Sets the override state of the given parameters on this component to a given value. + /// + /// The value to set the state of the overrides to. 
+ internal void SetOverridesTo(IEnumerable enumerable, bool state) { foreach (var prop in enumerable) { @@ -213,7 +223,7 @@ void SetAllOverridesTo(IEnumerable enumerable, bool state) .GetValue(prop, null); if (innerParams != null) - SetAllOverridesTo(innerParams, state); + SetOverridesTo(innerParams, state); } } } diff --git a/ShaderLibrary/Common.hlsl b/ShaderLibrary/Common.hlsl index 64c414d..f3c2e25 100644 --- a/ShaderLibrary/Common.hlsl +++ b/ShaderLibrary/Common.hlsl @@ -158,10 +158,14 @@ #endif // Include language header -#if defined(SHADER_API_XBOXONE) +#if defined (SHADER_API_GAMECORE) +#include "Packages/com.unity.render-pipelines.gamecore/ShaderLibrary/API/GameCore.hlsl" +#elif defined(SHADER_API_XBOXONE) #include "Packages/com.unity.render-pipelines.xboxone/ShaderLibrary/API/XBoxOne.hlsl" -#elif defined(SHADER_API_PSSL) +#elif defined(SHADER_API_PS4) #include "Packages/com.unity.render-pipelines.ps4/ShaderLibrary/API/PSSL.hlsl" +#elif defined(SHADER_API_PS5) +#include "Packages/com.unity.render-pipelines.ps5/ShaderLibrary/API/PSSL.hlsl" #elif defined(SHADER_API_D3D11) #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/API/D3D11.hlsl" #elif defined(SHADER_API_METAL) @@ -1319,7 +1323,7 @@ void LODDitheringTransition(uint2 fadeMaskSeed, float ditherFactor) // while on other APIs is in the red channel. Note that on some platform, always using the green channel might work, but is not guaranteed. 
uint GetStencilValue(uint2 stencilBufferVal) { -#if defined(SHADER_API_D3D11) || defined(SHADER_API_XBOXONE) +#if defined(SHADER_API_D3D11) || defined(SHADER_API_XBOXONE) || defined(SHADER_API_GAMECORE) return stencilBufferVal.y; #else return stencilBufferVal.x; diff --git a/ShaderLibrary/GeometricTools.hlsl b/ShaderLibrary/GeometricTools.hlsl index 289cb6a..c9e2e14 100644 --- a/ShaderLibrary/GeometricTools.hlsl +++ b/ShaderLibrary/GeometricTools.hlsl @@ -182,6 +182,15 @@ bool IntersectRayCone(float3 rayOrigin, float3 rayDirection, return hit; } +bool IntersectSphereAABB(float3 position, float radius, float3 aabbMin, float3 aabbMax) +{ + float x = max(aabbMin.x, min(position.x, aabbMax.x)); + float y = max(aabbMin.y, min(position.y, aabbMax.y)); + float z = max(aabbMin.z, min(position.z, aabbMax.z)); + float distance2 = ((x - position.x) * (x - position.x) + (y - position.y) * (y - position.y) + (z - position.z) * (z - position.z)); + return distance2 < radius * radius; +} + //----------------------------------------------------------------------------- // Miscellaneous functions //----------------------------------------------------------------------------- @@ -225,9 +234,12 @@ bool CullTriangleFrustum(float3 p0, float3 p1, float3 p2, float epsilon, float4 // Returns 'true' if the edge of the triangle is outside of the frustum. // The edges are defined s.t. they are on the opposite side of the point with the given index. // 'epsilon' is the (negative) distance to (outside of) the frustum below which we cull the triangle. 
-bool3 CullTriangleEdgesFrustum(float3 p0, float3 p1, float3 p2, float epsilon, float4 frustumPlanes[6], int numPlanes) +//output packing: +// x,y,z - one component per triangle edge, true if outside, false otherwise +// w - true if entire triangle is outside of at least 1 plane of the frustum, false otherwise +bool4 CullFullTriangleAndEdgesFrustum(float3 p0, float3 p1, float3 p2, float epsilon, float4 frustumPlanes[6], int numPlanes) { - bool3 edgesOutside = false; + bool4 edgesOutsideXYZ_triangleOutsideW = false; for (int i = 0; i < numPlanes; i++) { @@ -235,13 +247,26 @@ bool3 CullTriangleEdgesFrustum(float3 p0, float3 p1, float3 p2, float epsilon, f DistanceFromPlane(p1, frustumPlanes[i]) < epsilon, DistanceFromPlane(p2, frustumPlanes[i]) < epsilon); - // If both points of the edge are behind any of the planes, we cull. - edgesOutside.x = edgesOutside.x || (pointsOutside.y && pointsOutside.z); - edgesOutside.y = edgesOutside.y || (pointsOutside.x && pointsOutside.z); - edgesOutside.z = edgesOutside.z || (pointsOutside.x && pointsOutside.y); + bool3 edgesOutside; + // If both points of the edge are behind any of the planes, we cull. + edgesOutside.x = pointsOutside.y && pointsOutside.z; + edgesOutside.y = pointsOutside.x && pointsOutside.z; + edgesOutside.z = pointsOutside.x && pointsOutside.y; + + edgesOutsideXYZ_triangleOutsideW = edgesOutsideXYZ_triangleOutsideW || bool4(edgesOutside.xyz, all(pointsOutside)); } - return edgesOutside; + return edgesOutsideXYZ_triangleOutsideW; +} + +// Returns 'true' if the edge of the triangle is outside of the frustum. +// The edges are defined s.t. they are on the opposite side of the point with the given index. +// 'epsilon' is the (negative) distance to (outside of) the frustum below which we cull the triangle. 
+//output packing: +// x,y,z - one component per triangle edge, true if outside, false otherwise +bool3 CullTriangleEdgesFrustum(float3 p0, float3 p1, float3 p2, float epsilon, float4 frustumPlanes[6], int numPlanes) +{ + return CullFullTriangleAndEdgesFrustum(p0, p1, p2, epsilon, frustumPlanes, numPlanes).xyz; } bool CullTriangleBackFaceView(float3 p0, float3 p1, float3 p2, float epsilon, float3 V, float winding) diff --git a/ShaderLibrary/Texture.hlsl b/ShaderLibrary/Texture.hlsl index 7b75658..7a5ac40 100644 --- a/ShaderLibrary/Texture.hlsl +++ b/ShaderLibrary/Texture.hlsl @@ -45,7 +45,7 @@ struct UnityTexture2D float4 Sample(UnitySamplerState s, float2 uv) { return SAMPLE_TEXTURE2D(tex, s.samplerstate, uv); } float4 SampleLevel(UnitySamplerState s, float2 uv, float lod) { return SAMPLE_TEXTURE2D_LOD(tex, s.samplerstate, uv, lod); } float4 SampleBias(UnitySamplerState s, float2 uv, float bias) { return SAMPLE_TEXTURE2D_BIAS(tex, s.samplerstate, uv, bias); } - float4 SampleGrad(UnitySamplerState s, float2 uv, float dpdx, float dpdy) { return SAMPLE_TEXTURE2D_GRAD(tex, s.samplerstate, uv, dpdx, dpdy); } + float4 SampleGrad(UnitySamplerState s, float2 uv, float2 dpdx, float2 dpdy) { return SAMPLE_TEXTURE2D_GRAD(tex, s.samplerstate, uv, dpdx, dpdy); } #ifndef SHADER_API_GLES float CalculateLevelOfDetail(UnitySamplerState s, float2 uv) { return CALCULATE_TEXTURE2D_LOD(tex, s.samplerstate, uv); } @@ -53,7 +53,7 @@ struct UnityTexture2D float4 Sample(SAMPLER(s), float2 uv) { return SAMPLE_TEXTURE2D(tex, s, uv); } float4 SampleLevel(SAMPLER(s), float2 uv, float lod) { return SAMPLE_TEXTURE2D_LOD(tex, s, uv, lod); } float4 SampleBias(SAMPLER(s), float2 uv, float bias) { return SAMPLE_TEXTURE2D_BIAS(tex, s, uv, bias); } - float4 SampleGrad(SAMPLER(s), float2 uv, float dpdx, float dpdy) { return SAMPLE_TEXTURE2D_GRAD(tex, s, uv, dpdx, dpdy); } + float4 SampleGrad(SAMPLER(s), float2 uv, float2 dpdx, float2 dpdy) { return SAMPLE_TEXTURE2D_GRAD(tex, s, uv, dpdx, dpdy); } 
float4 SampleCmpLevelZero(SAMPLER_CMP(s), float2 uv, float cmp) { return SAMPLE_TEXTURE2D_SHADOW(tex, s, float3(uv, cmp)); } float4 Load(int3 pixel) { return LOAD_TEXTURE2D_LOD(tex, pixel.xy, pixel.z); } float CalculateLevelOfDetail(SAMPLER(s), float2 uv) { return CALCULATE_TEXTURE2D_LOD(tex, s, uv); } @@ -103,12 +103,12 @@ struct UnityTexture2DArray float4 Sample(UnitySamplerState s, float3 uv) { return SAMPLE_TEXTURE2D_ARRAY(tex, s.samplerstate, uv.xy, uv.z); } float4 SampleLevel(UnitySamplerState s, float3 uv, float lod) { return SAMPLE_TEXTURE2D_ARRAY_LOD(tex, s.samplerstate, uv.xy, uv.z, lod); } float4 SampleBias(UnitySamplerState s, float3 uv, float bias) { return SAMPLE_TEXTURE2D_ARRAY_BIAS(tex, s.samplerstate, uv.xy, uv.z, bias); } - float4 SampleGrad(UnitySamplerState s, float3 uv, float dpdx, float dpdy) { return SAMPLE_TEXTURE2D_ARRAY_GRAD(tex, s.samplerstate, uv.xy, uv.z, dpdx, dpdy); } + float4 SampleGrad(UnitySamplerState s, float3 uv, float2 dpdx, float2 dpdy) { return SAMPLE_TEXTURE2D_ARRAY_GRAD(tex, s.samplerstate, uv.xy, uv.z, dpdx, dpdy); } float4 Sample(SAMPLER(s), float3 uv) { return SAMPLE_TEXTURE2D_ARRAY(tex, s, uv.xy, uv.z); } float4 SampleLevel(SAMPLER(s), float3 uv, float lod) { return SAMPLE_TEXTURE2D_ARRAY_LOD(tex, s, uv.xy, uv.z, lod); } float4 SampleBias(SAMPLER(s), float3 uv, float bias) { return SAMPLE_TEXTURE2D_ARRAY_BIAS(tex, s, uv.xy, uv.z, bias); } - float4 SampleGrad(SAMPLER(s), float3 uv, float dpdx, float dpdy) { return SAMPLE_TEXTURE2D_ARRAY_GRAD(tex, s, uv.xy, uv.z, dpdx, dpdy); } + float4 SampleGrad(SAMPLER(s), float3 uv, float2 dpdx, float2 dpdy) { return SAMPLE_TEXTURE2D_ARRAY_GRAD(tex, s, uv.xy, uv.z, dpdx, dpdy); } float4 SampleCmpLevelZero(SAMPLER_CMP(s), float3 uv, float cmp) { return SAMPLE_TEXTURE2D_ARRAY_SHADOW(tex, s, float3(uv.xy, cmp), uv.z); } float4 Load(int4 pixel) { return LOAD_TEXTURE2D_ARRAY(tex, pixel.xy, pixel.z); } #endif diff --git a/ShaderLibrary/UnityInstancing.hlsl 
b/ShaderLibrary/UnityInstancing.hlsl index df3c0a4..a0e0de2 100644 --- a/ShaderLibrary/UnityInstancing.hlsl +++ b/ShaderLibrary/UnityInstancing.hlsl @@ -1,7 +1,7 @@ #ifndef UNITY_INSTANCING_INCLUDED #define UNITY_INSTANCING_INCLUDED -#if SHADER_TARGET >= 35 && (defined(SHADER_API_D3D11) || defined(SHADER_API_GLES3) || defined(SHADER_API_GLCORE) || defined(SHADER_API_XBOXONE) || defined(SHADER_API_PSSL) || defined(SHADER_API_VULKAN) || defined(SHADER_API_METAL)) +#if SHADER_TARGET >= 35 && (defined(SHADER_API_D3D11) || defined(SHADER_API_GLES3) || defined(SHADER_API_GLCORE) || defined(SHADER_API_XBOXONE) || defined(SHADER_API_GAMECORE) || defined(SHADER_API_PSSL) || defined(SHADER_API_VULKAN) || defined(SHADER_API_METAL)) #define UNITY_SUPPORT_INSTANCING #endif diff --git a/ShaderLibrary/Version.hlsl b/ShaderLibrary/Version.hlsl index 1b438bd..7519bfc 100644 --- a/ShaderLibrary/Version.hlsl +++ b/ShaderLibrary/Version.hlsl @@ -1,3 +1,6 @@ +// The old version number system below is deprecated with Graphics Packages that have moved to become core packages of Unity. 
+// User should rely on the Macro UNITY_VERSION now to detect which version of Unity is coupled to the current set of pipeline shader +// Example of usage #if UNITY_VERSION >= 202120 to check if the version is above or equal 2021.2 #define SHADER_LIBRARY_VERSION_MAJOR 11 #define SHADER_LIBRARY_VERSION_MINOR 0 diff --git a/Tests/Editor/ReflectionUtils.cs b/Tests/Editor/ReflectionUtils.cs new file mode 100644 index 0000000..d2586d4 --- /dev/null +++ b/Tests/Editor/ReflectionUtils.cs @@ -0,0 +1,79 @@ +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; + +namespace UnityEngine.Rendering.Tests +{ + public static class ReflectionUtils + { + /// + /// Calls a private method from a class + /// + /// The method name + /// The arguments to pass to the method + public static object Invoke(this object target, string methodName, params object[] args) + { + Assert.True(target != null, "The target could not be null"); + Assert.IsNotEmpty(methodName, "The field to set could not be null"); + + var mi = target.GetType().GetMethod(methodName, BindingFlags.NonPublic | BindingFlags.Instance); + Assert.True(mi != null, $"Could not find method `{methodName}` on object `{target}`"); + return mi.Invoke(target, args); + } + + private static FieldInfo FindField(this Type type, string fieldName) + { + FieldInfo fi = null; + + while (type != null) + { + fi = type.GetField(fieldName, BindingFlags.Instance | BindingFlags.NonPublic); + + if (fi != null) break; + + type = type.BaseType; + } + + Assert.True(fi != null, $"Could not find method `{fieldName}` on object `{type}`"); + + return fi; + } + + /// + /// Sets a private field from a class + /// + /// The field to change + /// The new value + public static void SetField(this object target, string fieldName, object value) + { + Assert.True(target != null, "The target could not be null"); + Assert.IsNotEmpty(fieldName, "The field to set could not be null"); + 
target.GetType().FindField(fieldName).SetValue(target, value); + } + + /// + /// Gets the value of a private field from a class + /// + /// The field to get + public static object GetField(this object target, string fieldName) + { + Assert.True(target != null, "The target could not be null"); + Assert.IsNotEmpty(fieldName, "The field to set could not be null"); + return target.GetType().FindField(fieldName).GetValue(target); + } + + /// + /// Gets all the fields from a class + /// + public static IEnumerable GetFields(this object target) + { + Assert.True(target != null, "The target could not be null"); + + return target.GetType() + .GetFields(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + .OrderBy(t => t.MetadataToken); + } + } +} diff --git a/Tests/Editor/ReflectionUtils.cs.meta b/Tests/Editor/ReflectionUtils.cs.meta new file mode 100644 index 0000000..1407d39 --- /dev/null +++ b/Tests/Editor/ReflectionUtils.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 994ab27378e49eb45830d39c4c0b5799 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Tests/Editor/Volumes.meta b/Tests/Editor/Volumes.meta new file mode 100644 index 0000000..07a7f25 --- /dev/null +++ b/Tests/Editor/Volumes.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 93ab3f290033d9a48b3cce3d26d93f89 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Tests/Editor/Volumes/VolumeComponentTests.cs b/Tests/Editor/Volumes/VolumeComponentTests.cs new file mode 100644 index 0000000..cbb097c --- /dev/null +++ b/Tests/Editor/Volumes/VolumeComponentTests.cs @@ -0,0 +1,159 @@ +using NUnit.Framework; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using UnityEditor.Rendering; + +namespace UnityEngine.Rendering.Tests +{ + public 
class VolumeComponentEditorTests + { + class VolumeComponentNoAdditionalAttributes : VolumeComponent + { + public MinFloatParameter parameter = new MinFloatParameter(0f, 0f); + } + + class VolumeComponentAllAdditionalAttributes : VolumeComponent + { + [AdditionalProperty] + public MinFloatParameter parameter1 = new MinFloatParameter(0f, 0f); + + [AdditionalProperty] + public FloatParameter parameter2 = new MinFloatParameter(0f, 0f); + } + + class VolumeComponentMixedAdditionalAttributes : VolumeComponent + { + public MinFloatParameter parameter1 = new MinFloatParameter(0f, 0f); + + [AdditionalProperty] + public FloatParameter parameter2 = new MinFloatParameter(0f, 0f); + + public MinFloatParameter parameter3 = new MinFloatParameter(0f, 0f); + + [AdditionalProperty] + public FloatParameter parameter4 = new MinFloatParameter(0f, 0f); + } + + [Test] + public void TestOverridesChanges() + { + var component = ScriptableObject.CreateInstance(); + var editor = (VolumeComponentEditor)Activator.CreateInstance(typeof(VolumeComponentEditor)); + editor.Invoke("Init", component, null); + + component.SetAllOverridesTo(false); + bool allOverridesState = (bool)editor.Invoke("AreAllOverridesTo", false); + Assert.True(allOverridesState); + + component.SetAllOverridesTo(true); + + // Was the change correct? 
+ allOverridesState = (bool)editor.Invoke("AreAllOverridesTo", true); + Assert.True(allOverridesState); + + // Enable the advance mode on the editor + component.SetField("m_AdvancedMode", true); + + // Everything is false + component.SetAllOverridesTo(false); + + // Disable the advance mode on the editor + component.SetField("m_AdvancedMode", false); + + // Now just set to true the overrides of non additional properties + editor.Invoke("SetOverridesTo", true); + + // Check that the non additional properties must be false + allOverridesState = (bool)editor.Invoke("AreAllOverridesTo", true); + Assert.False(allOverridesState); + + ScriptableObject.DestroyImmediate(component); + } + + static TestCaseData[] s_AdditionalAttributesTestCaseDatas = + { + new TestCaseData(typeof(VolumeComponentNoAdditionalAttributes)) + .Returns(Array.Empty()) + .SetName("VolumeComponentNoAdditionalAttributes"), + new TestCaseData(typeof(VolumeComponentAllAdditionalAttributes)) + .Returns(new string[2] {"parameter1", "parameter2"}) + .SetName("VolumeComponentAllAdditionalAttributes"), + new TestCaseData(typeof(VolumeComponentMixedAdditionalAttributes)) + .Returns(new string[2] {"parameter2", "parameter4"}) + .SetName("VolumeComponentMixedAdditionalAttributes"), + }; + + [Test, TestCaseSource(nameof(s_AdditionalAttributesTestCaseDatas))] + public string[] AdditionalProperties(Type volumeComponentType) + { + var component = (VolumeComponent)ScriptableObject.CreateInstance(volumeComponentType); + var editor = (VolumeComponentEditor)Activator.CreateInstance(typeof(VolumeComponentEditor)); + editor.Invoke("Init", component, null); + + var fields = component + .GetFields() + .Where(f => f.GetCustomAttribute() != null) + .Select(f => f.Name) + .ToArray(); + + var notAdditionalParameters = editor.GetField("m_VolumeNotAdditionalParameters") as List; + Assert.True(fields.Count() + notAdditionalParameters.Count == component.parameters.Count); + + ScriptableObject.DestroyImmediate(component); + + return 
fields; + } + + #region Decorators Handling Test + + class VolumeComponentDecorators : VolumeComponent + { + [Tooltip("Increase to make the noise texture appear bigger and less")] + public FloatParameter _NoiseTileSize = new FloatParameter(25.0f); + + [InspectorName("Color")] + public ColorParameter _FogColor = new ColorParameter(Color.grey); + + [InspectorName("Size and occurrence"), Tooltip("Increase to make patches SMALLER, and frequent")] + public ClampedFloatParameter _HighNoiseSpaceFreq = new ClampedFloatParameter(0.1f, 0.1f, 1f); + } + + readonly (string displayName, string tooltip)[] k_ExpectedResults = + { + (string.Empty, "Increase to make the noise texture appear bigger and less"), + ("Color", string.Empty), + ("Size and occurrence", "Increase to make patches SMALLER, and frequent") + }; + + [Test] + public void TestHandleParameterDecorators() + { + var component = ScriptableObject.CreateInstance(); + var editor = (VolumeComponentEditor)Activator.CreateInstance(typeof(VolumeComponentEditor)); + editor.Invoke("Init", component, null); + + var parameters = + editor.GetField("m_Parameters") as List<(GUIContent displayName, int displayOrder, + SerializedDataParameter param)>; + + Assert.True(parameters != null && parameters.Count() == k_ExpectedResults.Count()); + + for (int i = 0; i < k_ExpectedResults.Count(); ++i) + { + var property = parameters[i].param; + var title = new GUIContent(parameters[i].displayName); + + editor.Invoke("HandleDecorators", property, title); + + Assert.True(k_ExpectedResults[i].displayName == title.text); + Assert.True(k_ExpectedResults[i].tooltip == title.tooltip); + } + + ScriptableObject.DestroyImmediate(component); + } + + #endregion + } +} diff --git a/Tests/Editor/Volumes/VolumeComponentTests.cs.meta b/Tests/Editor/Volumes/VolumeComponentTests.cs.meta new file mode 100644 index 0000000..3c9cb09 --- /dev/null +++ b/Tests/Editor/Volumes/VolumeComponentTests.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 
0fd9ee276a1023e439cf7a9c393195fa +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/package.json b/package.json index 0cd977f..82f506d 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,7 @@ "unityRelease": "0a8", "displayName": "Core RP Library", "dependencies": { - "com.unity.ugui": "1.0.0" + "com.unity.ugui": "1.0.0", + "com.unity.modules.physics": "1.0.0" } }