// ARKitBackground.shader
Shader "Unlit/ARKitBackground"
{
Properties
{
_textureY ("TextureY", 2D) = "white" {}
_textureCbCr ("TextureCbCr", 2D) = "black" {}
_HumanStencil ("HumanStencil", 2D) = "black" {}
_HumanDepth ("HumanDepth", 2D) = "black" {}
}
SubShader
{
Tags
{
"Queue" = "Background"
"RenderType" = "Background"
"ForceNoShadowCasting" = "True"
}
Pass
{
Cull Off
ZTest Always
ZWrite On
Lighting Off
LOD 100
Tags
{
"LightMode" = "Always"
}
HLSLPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #pragma multi_compile_local __ ARKIT_BACKGROUND_URP ARKIT_BACKGROUND_LWRP
            #pragma multi_compile_local __ ARKIT_HUMAN_SEGMENTATION_ENABLED
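            // Note: ARKIT_BACKGROUND_URP / ARKIT_BACKGROUND_LWRP select the render-pipeline-specific
            // include path below, and ARKIT_HUMAN_SEGMENTATION_ENABLED gates the human stencil/depth
            // sampling in the fragment shader. The appropriate keywords are expected to be enabled at
            // runtime by AR Foundation's camera background renderer.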
            #if ARKIT_BACKGROUND_URP

            #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"

            #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler)
            #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler)
            #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord)

            #elif ARKIT_BACKGROUND_LWRP

            #include "Packages/com.unity.render-pipelines.lightweight/ShaderLibrary/Core.hlsl"
            #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"

            #define ARKIT_TEXTURE2D_HALF(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_HALF(sampler) SAMPLER(sampler)
            #define ARKIT_TEXTURE2D_FLOAT(texture) TEXTURE2D(texture)
            #define ARKIT_SAMPLER_FLOAT(sampler) SAMPLER(sampler)
            #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) SAMPLE_TEXTURE2D(texture,sampler,texcoord)

            #else // Legacy RP

            #include "UnityCG.cginc"

            #define real4 half4
            #define real4x4 half4x4
            #define TransformObjectToHClip UnityObjectToClipPos
            #define FastSRGBToLinear GammaToLinearSpace

            #define ARKIT_TEXTURE2D_HALF(texture) UNITY_DECLARE_TEX2D_HALF(texture)
            #define ARKIT_SAMPLER_HALF(sampler)
            #define ARKIT_TEXTURE2D_FLOAT(texture) UNITY_DECLARE_TEX2D_FLOAT(texture)
            #define ARKIT_SAMPLER_FLOAT(sampler)
            #define ARKIT_SAMPLE_TEXTURE2D(texture,sampler,texcoord) UNITY_SAMPLE_TEX2D(texture,texcoord)

            #endif
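            // The ARKIT_* macros above smooth over the texture API differences between the scriptable
            // render pipelines (separate texture and sampler objects) and the built-in pipeline
            // (combined texture samplers), so the declarations and sampling calls below can be
            // written once for all variants.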
            struct appdata
            {
                float3 position : POSITION;
                float2 texcoord : TEXCOORD0;
            };

            struct v2f
            {
                float4 position : SV_POSITION;
                float2 texcoord : TEXCOORD0;
            };

            struct fragment_output
            {
                real4 color : SV_Target;
                float depth : SV_Depth;
            };
            CBUFFER_START(UnityARFoundationPerFrame)
                // Device display transform is provided by the AR Foundation camera background renderer.
                float4x4 _UnityDisplayTransform;
                float _UnityCameraForwardScale;
            CBUFFER_END
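            // _UnityCameraForwardScale presumably carries any world-space scale applied to the AR
            // camera (for example via the session origin), so that depth values sampled in meters
            // can be rescaled before they are written to the depth buffer.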
            v2f vert (appdata v)
            {
                // Transform the position from object space to clip space.
                float4 position = TransformObjectToHClip(v.position);

                // Remap the texture coordinates based on the device rotation.
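                // mul(vector, matrix) in HLSL treats the texture coordinate as a row vector; the
                // display transform encodes the rotation/mirroring needed to map the camera image
                // onto the screen for the current device orientation.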
                float2 texcoord = mul(float3(v.texcoord, 1.0f), _UnityDisplayTransform).xy;

                v2f o;
                o.position = position;
                o.texcoord = texcoord;
                return o;
            }
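            // The matrix below converts the camera image from YCbCr to sRGB using full-range
            // BT.601 coefficients. The chroma channels are centered on 0.5, and the fragment shader
            // packs a constant 1.0 into the fourth component of the sampled vector, so the -0.5
            // chroma bias is folded into the last column of each row
            // (e.g. 1.4020 * 0.5 = 0.7010, 1.7720 * 0.5 = 0.8860).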
            CBUFFER_START(ARKitColorTransformations)
                static const real4x4 s_YCbCrToSRGB = real4x4(
                    real4(1.0h,  0.0000h,  1.4020h, -0.7010h),
                    real4(1.0h, -0.3441h, -0.7141h,  0.5291h),
                    real4(1.0h,  1.7720h,  0.0000h, -0.8860h),
                    real4(0.0h,  0.0000h,  0.0000h,  1.0000h)
                );
            CBUFFER_END
            inline float ConvertDistanceToDepth(float d)
            {
                // Account for scale
                d = _UnityCameraForwardScale > 0.0 ? _UnityCameraForwardScale * d : d;

                // Clip any distances smaller than the near clip plane, and compute the depth value from the distance.
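                // This is the inverse of Unity's LinearEyeDepth(): for an eye-space distance d, the
                // non-linear device depth is (1/d - _ZBufferParams.w) / _ZBufferParams.z.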
                return (d < _ProjectionParams.y) ? 0.0f : ((1.0f / _ZBufferParams.z) * ((1.0f / d) - _ZBufferParams.w));
            }
            ARKIT_TEXTURE2D_HALF(_textureY);
            ARKIT_SAMPLER_HALF(sampler_textureY);
            ARKIT_TEXTURE2D_HALF(_textureCbCr);
            ARKIT_SAMPLER_HALF(sampler_textureCbCr);

            #if ARKIT_HUMAN_SEGMENTATION_ENABLED
            ARKIT_TEXTURE2D_HALF(_HumanStencil);
            ARKIT_SAMPLER_HALF(sampler_HumanStencil);
            ARKIT_TEXTURE2D_FLOAT(_HumanDepth);
            ARKIT_SAMPLER_FLOAT(sampler_HumanDepth);
            #endif // ARKIT_HUMAN_SEGMENTATION_ENABLED
            fragment_output frag (v2f i)
            {
                // Sample the video textures (in YCbCr).
                real4 ycbcr = real4(ARKIT_SAMPLE_TEXTURE2D(_textureY, sampler_textureY, i.texcoord).r,
                                    ARKIT_SAMPLE_TEXTURE2D(_textureCbCr, sampler_textureCbCr, i.texcoord).rg,
                                    1.0h);

                // Convert from YCbCr to sRGB.
                real4 videoColor = mul(s_YCbCrToSRGB, ycbcr);

                #if !UNITY_COLORSPACE_GAMMA
                // If rendering in linear color space, convert the sRGB color to linear RGB.
                videoColor.xyz = FastSRGBToLinear(videoColor.xyz);
                #endif // !UNITY_COLORSPACE_GAMMA
                // Assume the background depth is the back of the depth clipping volume.
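                // (On iOS/Metal, Unity uses a reversed depth buffer, so a device depth of 0.0
                // corresponds to the far plane.)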
                float depthValue = 0.0f;

                #if ARKIT_HUMAN_SEGMENTATION_ENABLED
                // Check the human stencil, and skip non-human pixels.
                if (ARKIT_SAMPLE_TEXTURE2D(_HumanStencil, sampler_HumanStencil, i.texcoord).r > 0.5h)
                {
                    // Sample the human depth (in meters).
                    float humanDistance = ARKIT_SAMPLE_TEXTURE2D(_HumanDepth, sampler_HumanDepth, i.texcoord).r;

                    // Convert the distance to depth.
                    depthValue = ConvertDistanceToDepth(humanDistance);
                }
                #endif // ARKIT_HUMAN_SEGMENTATION_ENABLED
                fragment_output o;
                o.color = videoColor;
                o.depth = depthValue;
                return o;
            }
            ENDHLSL
        }
    }
}