Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions com.unity.render-pipelines.high-definition/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

### Added
- Added a SG node to get the main directional light direction.
- Added support for orthographic camera in path tracing.

### Changed
- MaterialReimporter.ReimportAllMaterials and MaterialReimporter.ReimportAllHDShaderGraphs now batch the asset database changes to improve performance.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -90,68 +90,88 @@ void MissMaterial(inout PathIntersection pathIntersection : SV_RayPayload)
ApplyFogAttenuation(WorldRayOrigin(), WorldRayDirection(), pathIntersection.value);
}

// Thin-lens depth of field: perturbs a pinhole camera ray so that points on the
// focus plane stay sharp while everything else blurs with the aperture radius.
// pixelCoord   - pixel being traced (used to fetch the per-pixel sample sequence)
// dotDirection - cosine between the camera forward axis and the ray direction
// origin       - in/out: camera-space ray origin, offset onto the lens disk
// direction    - in/out: ray direction, re-aimed at the in-focus point
void ApplyDepthOfField(uint2 pixelCoord, float dotDirection, inout float3 origin, inout float3 direction)
{
    // Aperture radius of zero (or less) means a pinhole camera: nothing to do.
    float lensRadius = _PathTracedDoFConstants.x;

    if (lensRadius <= 0.0)
        return;

    // Draw a uniform point on the lens disk, using the next free sample dimensions
    // (40 for path tracing, 2 for sub-pixel jittering, 64 for SSS -> 106, 107).
    float r1 = GetSample(pixelCoord, _RaytracingSampleIndex, 106);
    float r2 = GetSample(pixelCoord, _RaytracingSampleIndex, 107);
    float2 lensSample = lensRadius * SampleDiskUniform(r1, r2);

    // Intersect the original pinhole ray with the focus plane to get the in-focus point.
    float focusDistance = _PathTracedDoFConstants.y;
    float3 focusPoint = origin + (focusDistance / dotDirection) * direction;

    // Offset the origin on the lens plane (_ViewMatrix[0] = right, _ViewMatrix[1] = up) ...
    origin += lensSample.x * _ViewMatrix[0].xyz + lensSample.y * _ViewMatrix[1].xyz;

    // ... and aim the new ray back through the focus point.
    direction = normalize(focusPoint - origin);
}

[shader("raygeneration")]
void RayGen()
{
    // Get the current pixel coordinates
    uint2 pixelCoord = DispatchRaysIndex().xy;

    // Jitter them (we use 4x10 dimensions of our sequence during path tracing atm, so pick the next available ones)
    float4 jitteredPixelCoord = float4(pixelCoord, 1.0, 1.0);
    jitteredPixelCoord.x += GetSample(pixelCoord, _RaytracingSampleIndex, 40);
    jitteredPixelCoord.y += GetSample(pixelCoord, _RaytracingSampleIndex, 41);

    // Create the ray descriptor for this pixel
    RayDesc ray;
    ray.TMin = _RaytracingCameraNearPlane;
    ray.TMax = FLT_INF;

    // We need the camera forward direction in both types of projection
    float3 cameraDirection = GetViewForwardDir();

    // Compute the ray's origin and direction, for either perspective or orthographic projection
    if (IsPerspectiveProjection())
    {
        // Perspective rays all start at the camera position and fan out per pixel.
        ray.Origin = GetPrimaryCameraPosition();
        ray.Direction = -normalize(mul(jitteredPixelCoord, _PixelCoordToViewDirWS).xyz);

        // Use planar clipping, to match rasterization: scale TMin so the near
        // clip happens on the camera-facing plane rather than on a sphere.
        float dotDirection = dot(cameraDirection, ray.Direction);
        ray.TMin /= dotDirection;

        // Thin-lens depth of field (no-op when the aperture radius is zero).
        ApplyDepthOfField(pixelCoord, dotDirection, ray.Origin, ray.Direction);
    }
    else // Orthographic projection
    {
        // Map the jittered pixel to NDC on the near plane (y flipped to match screen space),
        // then unproject to world space; all orthographic rays share the camera direction.
        uint2 pixelResolution = DispatchRaysDimensions().xy;
        float3 screenCoord = float3(2.0 * jitteredPixelCoord.x / pixelResolution.x - 1.0,
                                    -2.0 * jitteredPixelCoord.y / pixelResolution.y + 1.0,
                                    0.0);

        ray.Origin = mul(_InvViewProjMatrix, screenCoord);
        ray.Direction = cameraDirection;
    }

    // Create and init the PathIntersection structure for this
    PathIntersection pathIntersection;
    pathIntersection.value = 1.0;
    pathIntersection.alpha = 1.0;
    pathIntersection.remainingDepth = _RaytracingMaxRecursion;
    pathIntersection.pixelCoord = pixelCoord;
    pathIntersection.maxRoughness = 0.0;

    // In order to achieve filtering for the textures, we need to compute the spread angle of the pixel
    pathIntersection.cone.spreadAngle = _RaytracingPixelSpreadAngle;
    pathIntersection.cone.width = 0.0;

    // Evaluate the ray intersection
    TraceRay(_RaytracingAccelerationStructure, RAY_FLAG_CULL_BACK_FACING_TRIANGLES, RAYTRACINGRENDERERFLAG_PATH_TRACING, 0, 1, 0, ray, pathIntersection);

    _FrameTexture[COORD_TEXTURE2D_X(pixelCoord)] = float4(pathIntersection.value, pathIntersection.alpha);
}

// This should never be called, return magenta just in case
Expand Down