-
Notifications
You must be signed in to change notification settings - Fork 0
/
SSAO.shader
152 lines (125 loc) · 5 KB
/
SSAO.shader
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
// Screen-space ambient occlusion (SSAO) post-process effect for Unity's
// Post Processing Stack v2 (uses its StdLib vertex/varyings helpers).
Shader "Hidden/Custom/SSAO"
{
HLSLINCLUDE
#include "Packages/com.unity.postprocessing/PostProcessing/Shaders/StdLib.hlsl"
// Scene color from the previous pass and the camera depth buffer.
TEXTURE2D_SAMPLER2D(_MainTex, sampler_MainTex);
TEXTURE2D_SAMPLER2D(_CameraDepthTexture, sampler_CameraDepthTexture);
// Final blend factor between the untouched scene color and the AO result (0..1 expected).
float _Blend;
// Camera matrices supplied from C#; both forward and inverse forms are
// provided so positions can be reconstructed from depth in either space.
float4x4 _ViewMatrix;
float4x4 _ViewProjectionMatrix;
float4x4 _ProjectionMatrix;
float4x4 _InverseViewMatrix;
float4x4 _InverseViewProjectionMatrix;
float4x4 _InverseProjectionMatrix;
// Up to 64 offset vectors (xyz used; w ignored) around each shaded point.
// NOTE(review): presumably randomized on the CPU side — confirm against the caller.
float4 _SamplingPoints[64];
// Scales the sampling offsets (world/view-space units).
float _OcclusionSampleLength;
// Samples closer/farther than these view-space distances are ignored.
float _OcclusionMinDistance;
float _OcclusionMaxDistance;
// Multiplier on the occlusion rate before it darkens the scene color.
float _OcclusionStrength;
// Color the scene is darkened toward where occlusion is detected.
float4 _OcclusionColor;
// ------------------------------------------------------------------------------------------------
// Inverse-projects a screen UV plus a raw (non-linear) depth-buffer value
// back into a world-space position using the inverse view-projection matrix.
float3 ReconstructWorldPositionFromDepth(float2 screenUV, float depth)
{
    // UV [0,1] -> NDC [-1,1]; raw depth goes straight into clip z.
    float4 ndc = float4(screenUV * 2.0 - 1.0, depth, 1.0);
#if UNITY_UV_STARTS_AT_TOP
    // Account for platforms whose UV origin is at the top of the screen.
    ndc.y = -ndc.y;
#endif
    // Undo view * projection, then apply the perspective divide.
    float4 homogeneous = mul(_InverseViewProjectionMatrix, ndc);
    return homogeneous.xyz / homogeneous.w;
}
// Inverse-projects a screen UV plus a raw (non-linear) depth-buffer value
// back into a view-space position using the inverse projection matrix.
float3 ReconstructViewPositionFromDepth(float2 screenUV, float depth)
{
    // UV [0,1] -> NDC [-1,1]; raw depth goes straight into clip z.
    float4 ndc = float4(screenUV * 2.0 - 1.0, depth, 1.0);
#if UNITY_UV_STARTS_AT_TOP
    // Account for platforms whose UV origin is at the top of the screen.
    ndc.y = -ndc.y;
#endif
    // Undo the projection only (stays in view space), then perspective-divide.
    float4 homogeneous = mul(_InverseProjectionMatrix, ndc);
    return homogeneous.xyz / homogeneous.w;
}
// Fetches the raw (non-linear) depth-buffer value at the given UV,
// applying the stereo screen-space transform for VR correctness.
float SampleRawDepth(float2 uv)
{
    float2 stereoUv = UnityStereoTransformScreenSpaceTex(uv);
    // Explicit LOD 0 sample: gradients are unreliable near the divergent
    // sampling coordinates produced by the SSAO loop.
    return SAMPLE_DEPTH_TEXTURE_LOD(_CameraDepthTexture, sampler_CameraDepthTexture, stereoUv, 0);
}
// Convenience wrapper: raw depth at `uv` converted to linear [0,1] depth.
float SampleLinear01Depth(float2 uv)
{
    return Linear01Depth(SampleRawDepth(uv));
}
// ------------------------------------------------------------------------------------------------
// SSAO fragment pass.
// For each pixel: reconstruct its view-space position from the depth buffer,
// offset it by each of the 64 _SamplingPoints, reproject to screen space, and
// count how many offset points lie behind the geometry actually stored in the
// depth buffer. The occlusion rate darkens the scene color toward
// _OcclusionColor, then the whole effect is blended in by _Blend.
//
// Fixes vs. the original:
//  - removed the unused world-space reconstruction (one dead matrix multiply
//    per pixel; it only served the commented-out "pattern_1" path),
//  - explicit .rgb swizzles instead of implicit float4 -> float3 truncation,
//  - saturate() on the AO lerp factor so _OcclusionStrength > 1 can no longer
//    extrapolate past _OcclusionColor into out-of-range colors.
float4 Frag(VaryingsDefault i) : SV_Target
{
    const int SAMPLE_COUNT = 64;
    float4 color = float4(1, 1, 1, 1);
    float4 baseColor = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.texcoord);
    float rawDepth = SampleRawDepth(i.texcoord);
    float depth = Linear01Depth(rawDepth);
    float3 viewPosition = ReconstructViewPositionFromDepth(i.texcoord, rawDepth);
    float eps = .0001;
    // Skip pixels with no written depth (far plane / skybox): nothing to occlude.
    if (depth > 1. - eps)
    {
        return baseColor;
    }
    int occludedCount = 0;
    int divCount = SAMPLE_COUNT;
    for (int j = 0; j < SAMPLE_COUNT; j++)
    {
        float4 offset = _SamplingPoints[j];
        offset.w = 0;
        // pattern_1 (world -> view -> clip) is also possible via
        // ReconstructWorldPositionFromDepth + _ViewProjectionMatrix, but the
        // view -> clip path below needs one fewer matrix multiply per sample.
        float4 offsetViewPosition = float4(viewPosition, 1.) + offset * _OcclusionSampleLength;
        float4 offsetClipPosition = mul(_ProjectionMatrix, offsetViewPosition);
#if UNITY_UV_STARTS_AT_TOP
        offsetClipPosition.y = -offsetClipPosition.y;
#endif
        // Clip -> NDC -> UV [0,1] to look up the depth at the offset point.
        float2 samplingCoord = (offsetClipPosition.xy / offsetClipPosition.w) * 0.5 + 0.5;
        float samplingRawDepth = SampleRawDepth(samplingCoord);
        float3 samplingViewPosition = ReconstructViewPositionFromDepth(samplingCoord, samplingRawDepth);
        // Ignore samples whose scene point is too close to (self-occlusion)
        // or too far from (unrelated geometry) the current view position.
        float dist = distance(samplingViewPosition.xyz, viewPosition.xyz);
        if (dist < _OcclusionMinDistance || _OcclusionMaxDistance < dist)
        {
            continue;
        }
        // Occluded when the scene surface at the offset's screen position is
        // nearer the camera than the offset point itself (view space looks
        // down -z, so a greater z means closer to the camera).
        if (samplingViewPosition.z > offsetViewPosition.z)
        {
            occludedCount++;
        }
    }
    float aoRate = (float)occludedCount / (float)divCount;
    // Ideally AO should attenuate only ambient lighting, but as a forward
    // post-process we apply it to the full scene color.
    color.rgb = lerp(
        baseColor.rgb,
        _OcclusionColor.rgb,
        saturate(aoRate * _OcclusionStrength)
    );
    color.rgb = lerp(baseColor.rgb, color.rgb, _Blend);
    color.a = 1;
    return color;
}
ENDHLSL
SubShader
{
// Standard full-screen post-process state: no culling, no depth write,
// and always pass the depth test so every pixel is shaded.
Cull Off ZWrite Off ZTest Always
Pass
{
HLSLPROGRAM
// VertDefault comes from the Post Processing Stack's StdLib.hlsl include.
#pragma vertex VertDefault
#pragma fragment Frag
ENDHLSL
}
}
}