/
BuiltinFunctions.glsl
974 lines (893 loc) · 29.2 KB
/
BuiltinFunctions.glsl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
/**
 * A large floating-point value used as a stand-in for infinity, e.g. as the
 * sentinel bounds of {@link czm_raySegment} intervals.
 *
 * @name czm_infinity
 * @glslConstant
 */
const float czm_infinity = 5906376272000.0; // Distance from the Sun to Pluto in meters. TODO: What is best given lowp, mediump, and highp?
/**
 * 0.1 epsilon, for very coarse floating-point comparisons.
 *
 * @name czm_epsilon1
 * @glslConstant
 */
const float czm_epsilon1 = 0.1;
/**
 * 0.01 epsilon, for coarse floating-point comparisons.
 *
 * @name czm_epsilon2
 * @glslConstant
 */
const float czm_epsilon2 = 0.01;
/**
 * 0.001 epsilon, for floating-point comparisons.
 *
 * @name czm_epsilon3
 * @glslConstant
 */
const float czm_epsilon3 = 0.001;
/**
 * 0.0001 epsilon, for floating-point comparisons.
 *
 * @name czm_epsilon4
 * @glslConstant
 */
const float czm_epsilon4 = 0.0001;
/**
 * 0.00001 epsilon, for floating-point comparisons.
 *
 * @name czm_epsilon5
 * @glslConstant
 */
const float czm_epsilon5 = 0.00001;
/**
 * 0.000001 epsilon, for fine floating-point comparisons.
 *
 * @name czm_epsilon6
 * @glslConstant
 */
const float czm_epsilon6 = 0.000001;
/**
 * 0.0000001 epsilon, for very fine floating-point comparisons.  This is the
 * default tolerance used by {@link czm_equalsEpsilon}.
 *
 * @name czm_epsilon7
 * @glslConstant
 */
const float czm_epsilon7 = 0.0000001;
/**
 * Compares two floats for near-equality, i.e. whether they differ by at most
 * <code>epsilon</code>.  The 2-argument overload uses {@link czm_epsilon7}.
 *
 * @name czm_equalsEpsilon
 * @glslFunction
 */
bool czm_equalsEpsilon(float left, float right, float epsilon) {
    return abs(left - right) <= epsilon;
}
bool czm_equalsEpsilon(float left, float right) {
    // Intentionally inlined instead of delegating to the 3-argument
    // overload; works around a bug in Opera Next 12.
    return abs(left - right) <= czm_epsilon7;
}
///////////////////////////////////////////////////////////////////////////////
/**
 * Returns the transpose of the matrix.  The input <code>matrix</code> can be
 * a <code>mat2</code>, <code>mat3</code>, or <code>mat4</code>.
 *
 * @name czm_transpose
 * @glslFunction
 *
 * @param {} matrix The matrix to transpose.
 *
 * @returns {} The transposed matrix.
 *
 * @example
 * // GLSL declarations
 * mat2 czm_transpose(mat2 matrix);
 * mat3 czm_transpose(mat3 matrix);
 * mat4 czm_transpose(mat4 matrix);
 *
 * // Transpose a 3x3 rotation matrix to find its inverse.
 * mat3 eastNorthUpToEye = czm_eastNorthUpToEyeCoordinates(
 *     positionMC, normalEC);
 * mat3 eyeToEastNorthUp = czm_transpose(eastNorthUpToEye);
 */
mat2 czm_transpose(mat2 matrix)
{
    // Each column of the result is the corresponding row of the input.
    return mat2(
        vec2(matrix[0].x, matrix[1].x),
        vec2(matrix[0].y, matrix[1].y));
}
mat3 czm_transpose(mat3 matrix)
{
    return mat3(
        vec3(matrix[0].x, matrix[1].x, matrix[2].x),
        vec3(matrix[0].y, matrix[1].y, matrix[2].y),
        vec3(matrix[0].z, matrix[1].z, matrix[2].z));
}
mat4 czm_transpose(mat4 matrix)
{
    return mat4(
        vec4(matrix[0].x, matrix[1].x, matrix[2].x, matrix[3].x),
        vec4(matrix[0].y, matrix[1].y, matrix[2].y, matrix[3].y),
        vec4(matrix[0].z, matrix[1].z, matrix[2].z, matrix[3].z),
        vec4(matrix[0].w, matrix[1].w, matrix[2].w, matrix[3].w));
}
///////////////////////////////////////////////////////////////////////////////
/**
 * Transforms a position from model to window coordinates.  Model-to-clip is
 * done with {@link czm_modelViewProjection}; NDC-to-window is done with
 * {@link czm_viewportTransformation}, which assumes a depth range of
 * <code>near = 0</code> and <code>far = 1</code>.
 * <br /><br />
 * This transform is useful when there is a need to manipulate window
 * coordinates in a vertex shader as done by {@link BillboardCollection}.
 * <br /><br />
 * This function should not be confused with {@link czm_viewportOrthographic},
 * which is an orthographic projection matrix that transforms from window
 * coordinates to clip coordinates.
 *
 * @name czm_modelToWindowCoordinates
 * @glslFunction
 *
 * @param {vec4} position The position in model coordinates to transform.
 *
 * @returns {vec4} The transformed position in window coordinates.
 *
 * @see czm_eyeToWindowCoordinates
 * @see czm_modelViewProjection
 * @see czm_viewportTransformation
 * @see czm_viewportOrthographic
 * @see BillboardCollection
 *
 * @example
 * vec4 positionWC = czm_modelToWindowCoordinates(positionMC);
 */
vec4 czm_modelToWindowCoordinates(vec4 position)
{
    vec4 coord = czm_modelViewProjection * position;                     // clip coordinates
    coord.xyz /= coord.w;                                                // normalized device coordinates
    coord.xyz = (czm_viewportTransformation * vec4(coord.xyz, 1.0)).xyz; // window coordinates
    return coord;
}
/**
 * Transforms a position from eye to window coordinates.  Eye-to-clip is done
 * with {@link czm_projection}; NDC-to-window is done with
 * {@link czm_viewportTransformation}, which assumes a depth range of
 * <code>near = 0</code> and <code>far = 1</code>.
 * <br /><br />
 * This transform is useful when there is a need to manipulate window
 * coordinates in a vertex shader as done by {@link BillboardCollection}.
 *
 * @name czm_eyeToWindowCoordinates
 * @glslFunction
 *
 * @param {vec4} position The position in eye coordinates to transform.
 *
 * @returns {vec4} The transformed position in window coordinates.
 *
 * @see czm_modelToWindowCoordinates
 * @see czm_projection
 * @see czm_viewportTransformation
 * @see BillboardCollection
 *
 * @example
 * vec4 positionWC = czm_eyeToWindowCoordinates(positionEC);
 */
vec4 czm_eyeToWindowCoordinates(vec4 positionEC)
{
    vec4 coord = czm_projection * positionEC;                            // clip coordinates
    coord.xyz /= coord.w;                                                // normalized device coordinates
    coord.xyz = (czm_viewportTransformation * vec4(coord.xyz, 1.0)).xyz; // window coordinates
    return coord;
}
/**
 * Transforms a position from window to eye coordinates.
 * The transform from window to normalized device coordinates is done using components
 * of {@link czm_viewport} and {@link czm_viewportTransformation} instead of calculating
 * the inverse of <code>czm_viewportTransformation</code>. The transformation from
 * normalized device coordinates to clip coordinates is done using <code>positionWC.w</code>,
 * which is expected to be the scalar used in the perspective divide. The transformation
 * from clip to eye coordinates is done using {@link czm_inverseProjection}.
 *
 * @name czm_windowToEyeCoordinates
 * @glslFunction
 *
 * @param {vec4} fragmentCoordinate The position in window coordinates to transform.
 *
 * @returns {vec4} The transformed position in eye coordinates.
 *
 * @see czm_modelToWindowCoordinates
 * @see czm_eyeToWindowCoordinates
 * @see czm_inverseProjection
 * @see czm_viewport
 * @see czm_viewportTransformation
 *
 * @example
 * vec4 positionEC = czm_windowToEyeCoordinates(gl_FragCoord);
 */
vec4 czm_windowToEyeCoordinates(vec4 fragmentCoordinate)
{
// Window -> NDC, using the viewport rectangle (x, y, width, height) directly.
float x = 2.0 * (fragmentCoordinate.x - czm_viewport.x) / czm_viewport.z - 1.0;
float y = 2.0 * (fragmentCoordinate.y - czm_viewport.y) / czm_viewport.w - 1.0;
// Invert the depth mapping applied by the viewport transformation.
float z = (fragmentCoordinate.z - czm_viewportTransformation[3][2]) / czm_viewportTransformation[2][2];
vec4 q = vec4(x, y, z, 1.0);
// fragmentCoordinate.w is expected to hold 1/w_clip (as in gl_FragCoord), so
// dividing by it undoes the perspective divide, yielding clip coordinates.
q /= fragmentCoordinate.w;
q = czm_inverseProjection * q;
return q;
}
///////////////////////////////////////////////////////////////////////////////
/**
 * Applies an eye offset to a position in eye coordinates.  The x and y
 * components of <code>eyeOffset</code> shift the position in the view plane,
 * and the z component shifts it along the normalized direction from the eye
 * to the position.
 *
 * @name czm_eyeOffset
 * @glslFunction
 *
 * @param {vec4} positionEC The position in eye coordinates.
 * @param {vec3} eyeOffset The offset to apply, in eye coordinates.
 *
 * @returns {vec4} The offset position in eye coordinates.
 */
vec4 czm_eyeOffset(vec4 positionEC, vec3 eyeOffset)
{
// This equation is approximate in x and y.
vec4 p = positionEC;
vec4 zEyeOffset = normalize(p) * eyeOffset.z;
p.xy += eyeOffset.xy + zEyeOffset.xy;
p.z += zEyeOffset.z;
return p;
}
///////////////////////////////////////////////////////////////////////////////
/**
 * Computes the geodetic surface normal of a point on an ellipsoid: the
 * normalized gradient of the ellipsoid's implicit surface function at that
 * point.
 *
 * @name czm_geodeticSurfaceNormal
 * @glslFunction
 *
 * @param {vec3} positionOnEllipsoid The point on the ellipsoid's surface.
 * @param {vec3} ellipsoidCenter The center of the ellipsoid.
 * @param {vec3} oneOverEllipsoidRadiiSquared The per-axis reciprocals of the squared radii.
 *
 * @returns {vec3} The normalized geodetic surface normal.
 */
vec3 czm_geodeticSurfaceNormal(vec3 positionOnEllipsoid, vec3 ellipsoidCenter, vec3 oneOverEllipsoidRadiiSquared)
{
    vec3 gradient = (positionOnEllipsoid - ellipsoidCenter) * oneOverEllipsoidRadiiSquared;
    return normalize(gradient);
}
/**
 * Computes (longitude, latitude)-style texture coordinates in [0, 1] from a
 * normalized geodetic surface normal: s from the longitude (atan of y/x) and
 * t from the latitude (asin of z).
 *
 * @name czm_ellipsoidWgs84TextureCoordinates
 * @glslFunction
 */
vec2 czm_ellipsoidWgs84TextureCoordinates(vec3 normal)
{
return vec2(atan(normal.y, normal.x) * czm_oneOverTwoPi + 0.5, asin(normal.z) * czm_oneOverPi + 0.5);
}
/**
 * Computes a 3x3 rotation matrix that transforms vectors from an ellipsoid's east-north-up coordinate system
 * to eye coordinates. In east-north-up coordinates, x points east, y points north, and z points along the
 * surface normal. East-north-up can be used as an ellipsoid's tangent space for operations such as bump mapping.
 * <br /><br />
 * The ellipsoid is assumed to be centered at the model coordinate's origin.
 *
 * @name czm_eastNorthUpToEyeCoordinates
 * @glslFunction
 *
 * @param {vec3} positionMC The position on the ellipsoid in model coordinates.
 * @param {vec3} normalEC The normalized ellipsoid surface normal, at <code>positionMC</code>, in eye coordinates.
 *
 * @returns {mat3} A 3x3 rotation matrix that transforms vectors from the east-north-up coordinate system to eye coordinates.
 *
 * @example
 * // Transform a vector defined in the east-north-up coordinate
 * // system, (0, 0, 1) which is the surface normal, to eye
 * // coordinates.
 * mat3 m = czm_eastNorthUpToEyeCoordinates(positionMC, normalEC);
 * vec3 normalEC = m * vec3(0.0, 0.0, 1.0);
 */
mat3 czm_eastNorthUpToEyeCoordinates(vec3 positionMC, vec3 normalEC)
{
vec3 tangentMC = normalize(vec3(-positionMC.y, positionMC.x, 0.0)); // normalized surface tangent (east) in model coordinates
vec3 tangentEC = normalize(czm_normal3D * tangentMC); // normalized surface tangent in eye coordinates
vec3 bitangentEC = normalize(cross(normalEC, tangentEC)); // normalized surface bitangent (north) in eye coordinates
return mat3(
tangentEC.x, tangentEC.y, tangentEC.z,
bitangentEC.x, bitangentEC.y, bitangentEC.z,
normalEC.x, normalEC.y, normalEC.z);
}
///////////////////////////////////////////////////////////////////////////////
/**
 * Used as input to every material's czm_getMaterial function.
 *
 * @name czm_materialInput
 * @glslStruct
 *
 * @property {float} s 1D texture coordinates.
 * @property {vec2} st 2D texture coordinates.
 * @property {vec3} str 3D texture coordinates.
 * @property {vec3} normalEC Unperturbed surface normal in eye coordinates.
 * @property {mat3} tangentToEyeMatrix Matrix for converting a tangent space normal to eye space.
 * @property {vec3} positionToEyeEC Vector from the fragment to the eye in eye coordinates. The magnitude is the distance in meters from the fragment to the eye.
 * @property {vec3} positionMC Position in model coordinates.
 */
struct czm_materialInput
{
float s;                 // 1D texture coordinate
vec2 st;                 // 2D texture coordinates
vec3 str;                // 3D texture coordinates
vec3 normalEC;           // unperturbed surface normal, eye coordinates
mat3 tangentToEyeMatrix; // tangent-space -> eye-space rotation
vec3 positionToEyeEC;    // fragment-to-eye vector, eye coordinates (unnormalized)
vec3 positionMC;         // position, model coordinates
};
/**
 * Holds material information that can be used for lighting. Returned by all czm_getMaterial functions.
 *
 * @name czm_material
 * @glslStruct
 *
 * @property {vec3} diffuse Incoming light that scatters evenly in all directions.
 * @property {float} specular Intensity of incoming light reflecting in a single direction.
 * @property {float} shininess The sharpness of the specular reflection. Higher values create a smaller, more focused specular highlight.
 * @property {vec3} normal Surface's normal in eye coordinates. It is used for effects such as normal mapping. The default is the surface's unmodified normal.
 * @property {vec3} emission Light emitted by the material equally in all directions. The default is vec3(0.0), which emits no light.
 * @property {float} alpha Opacity of this material. 0.0 is completely transparent; 1.0 is completely opaque.
 */
struct czm_material
{
vec3 diffuse;    // diffuse color
float specular;  // specular intensity
float shininess; // specular exponent
vec3 normal;     // surface normal, eye coordinates
vec3 emission;   // emitted light
float alpha;     // opacity in [0, 1]
};
/**
 * A czm_material with default values. Every material's czm_getMaterial
 * should use this default material as a base for the material it returns.
 * The default normal value is given by materialInput.normalEC.
 *
 * @name czm_getDefaultMaterial
 * @glslFunction
 *
 * @param {czm_materialInput} input The input used to construct the default material.
 *
 * @returns {czm_material} The default material.
 *
 * @see czm_materialInput
 * @see czm_material
 * @see czm_getMaterial
 */
czm_material czm_getDefaultMaterial(czm_materialInput materialInput)
{
czm_material material;
material.diffuse = vec3(0.0);
material.specular = 0.0;
material.shininess = 1.0;
material.normal = materialInput.normalEC;
material.emission = vec3(0.0);
material.alpha = 1.0;
return material;
}
// Lambertian diffuse term: cosine of the angle between the light direction
// and the surface normal, clamped to 0.0 for back-facing light.
float getLambertDiffuse(vec3 lightDirectionEC, vec3 normalEC)
{
return max(dot(lightDirectionEC, normalEC), 0.0);
}
// Lambertian diffuse term against a material's (possibly perturbed) normal.
float getLambertDiffuseOfMaterial(vec3 lightDirectionEC, czm_material material)
{
return getLambertDiffuse(lightDirectionEC, material.normal);
}
// Phong specular term: the clamped cosine between the mirrored light
// direction and the to-eye vector, raised to the shininess exponent.
float getSpecular(vec3 lightDirectionEC, vec3 toEyeEC, vec3 normalEC, float shininess)
{
vec3 toReflectedLight = reflect(-lightDirectionEC, normalEC);
float specular = max(dot(toReflectedLight, toEyeEC), 0.0);
return pow(specular, shininess);
}
// Phong specular term using a material's normal and shininess.
float getSpecularOfMaterial(vec3 lightDirectionEC, vec3 toEyeEC, czm_material material)
{
return getSpecular(lightDirectionEC, toEyeEC, material.normal, material.shininess);
}
/**
 * Computes a color using the Phong lighting model.
 *
 * @name czm_phong
 * @glslFunction
 *
 * @param {vec3} toEye A normalized vector from the fragment to the eye in eye coordinates.
 * @param {czm_material} material The fragment's material.
 *
 * @returns {vec4} The computed color, with the material's alpha in the w component.
 *
 * @example
 * vec3 positionToEyeEC = // ...
 * czm_material material = // ...
 * gl_FragColor = czm_phong(normalize(positionToEyeEC), material);
 *
 * @see czm_getMaterial
 */
vec4 czm_phong(vec3 toEye, czm_material material)
{
// Diffuse from directional light sources at eye (for top-down and horizon views)
float diffuse = getLambertDiffuseOfMaterial(vec3(0.0, 0.0, 1.0), material) + getLambertDiffuseOfMaterial(vec3(0.0, 1.0, 0.0), material);
// Specular from sun and pseudo-moon
float specular = getSpecularOfMaterial(czm_sunDirectionEC, toEye, material) + getSpecularOfMaterial(czm_moonDirectionEC, toEye, material);
// No ambient contribution; emission alone provides any base color.
vec3 ambient = vec3(0.0);
vec3 color = ambient + material.emission;
color += material.diffuse * diffuse;
color += material.specular * specular;
return vec4(color, material.alpha);
}
/**
 * Computes the luminance of a color.
 *
 * @name czm_luminance
 * @glslFunction
 *
 * @param {vec3} rgb The color.
 *
 * @returns {float} The luminance.
 *
 * @example
 * float dark = czm_luminance(vec3(0.0)); // 0.0
 * float light = czm_luminance(vec3(1.0)); // ~1.0
 */
float czm_luminance(vec3 rgb)
{
// Algorithm from Chapter 10 of Graphics Shaders.
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
return dot(rgb, W);
}
/**
 * Adjusts the hue of a color.
 *
 * @name czm_hue
 * @glslFunction
 *
 * @param {vec3} rgb The color.
 * @param {float} adjustment The amount to adjust the hue of the color in radians.
 *
 * @returns {vec3} The color with the hue adjusted.
 *
 * @example
 * vec3 adjustHue = czm_hue(color, czm_pi); // The same as czm_hue(color, -czm_pi)
 */
vec3 czm_hue(vec3 rgb, float adjustment)
{
// Convert to YIQ, rotate the chroma (I, Q) plane by the adjustment angle,
// and convert back to RGB.
const mat3 toYIQ = mat3(0.299, 0.587, 0.114,
0.595716, -0.274453, -0.321263,
0.211456, -0.522591, 0.311135);
const mat3 toRGB = mat3(1.0, 0.9563, 0.6210,
1.0, -0.2721, -0.6474,
1.0, -1.107, 1.7046);
vec3 yiq = toYIQ * rgb;
float hue = atan(yiq.z, yiq.y) + adjustment;
float chroma = sqrt(yiq.z * yiq.z + yiq.y * yiq.y);
vec3 color = vec3(yiq.x, chroma * cos(hue), chroma * sin(hue));
return toRGB * color;
}
/**
 * Adjusts the saturation of a color.
 *
 * @name czm_saturation
 * @glslFunction
 *
 * @param {vec3} rgb The color.
 * @param {float} adjustment The amount to adjust the saturation of the color.
 *
 * @returns {vec3} The color with the saturation adjusted.
 *
 * @example
 * vec3 greyScale = czm_saturation(color, 0.0);
 * vec3 doubleSaturation = czm_saturation(color, 2.0);
 */
vec3 czm_saturation(vec3 rgb, float adjustment)
{
// Algorithm from Chapter 16 of OpenGL Shading Language
vec3 intensity = vec3(czm_luminance(rgb));
return mix(intensity, rgb, adjustment);
}
/**
 * Multiplies two colors componentwise, then rescales the product so its
 * luminance is the mean of the two input luminances.
 *
 * @name czm_multiplyWithColorBalance
 * @glslFunction
 *
 * @param {vec3} left The first color.
 * @param {vec3} right The second color.
 *
 * @returns {vec3} The luminance-balanced product of the two colors.
 */
vec3 czm_multiplyWithColorBalance(vec3 left, vec3 right)
{
    // Algorithm from Chapter 10 of Graphics Shaders.
    const vec3 W = vec3(0.2125, 0.7154, 0.0721);
    vec3 target = left * right;
    float leftLuminance = dot(left, W);
    float rightLuminance = dot(right, W);
    // Clamp the denominator away from zero: when the componentwise product is
    // (near) black, the unguarded expression divided by zero, which is
    // undefined in GLSL.  The clamp leaves non-degenerate inputs unchanged.
    float targetLuminance = max(dot(target, W), czm_epsilon7);
    return ((leftLuminance + rightLuminance) / (2.0 * targetLuminance)) * target;
}
///////////////////////////////////////////////////////////////////////////////
/**
 * The maximum latitude, in radians, both North and South, supported by a Web Mercator
 * (EPSG:3857) projection. Technically, the Mercator projection is defined
 * for any latitude up to (but not including) 90 degrees, but it makes sense
 * to cut it off sooner because it grows exponentially with increasing latitude.
 * The logic behind this particular cutoff value, which is the one used by
 * Google Maps, Bing Maps, and Esri, is that it makes the projection
 * square. That is, the extent is equal in the X and Y directions.
 *
 * The constant value is computed as follows:
 * czm_pi * 0.5 - (2.0 * atan(exp(-czm_pi)))
 * which is approximately 85.05 degrees.
 *
 * @name czm_webMercatorMaxLatitude
 * @glslConstant
 */
const float czm_webMercatorMaxLatitude = 1.4844222297453323669610967939;
/**
 * Specifies a flat, 2D map.
 *
 * @name czm_scene2D
 * @glslConstant
 */
const int czm_scene2D = 0;
/**
 * Specifies 2.5D Columbus View.
 *
 * @name czm_columbusView
 * @glslConstant
 */
const int czm_columbusView = 1;
/**
 * Specifies a 3D globe.
 *
 * @name czm_scene3D
 * @glslConstant
 */
const int czm_scene3D = 2;
/**
 * Specifies that the scene is morphing between modes.
 *
 * @name czm_morphing
 * @glslConstant
 */
const int czm_morphing = 3;
/**
 * Interpolates between a 2D (Columbus View) position and a 3D position while
 * the scene morphs between modes.
 *
 * @name czm_columbusViewMorph
 * @glslFunction
 *
 * @param {vec3} position2D The position in 2D/Columbus View coordinates.
 * @param {vec3} position3D The position in 3D coordinates.
 * @param {float} time The morph time; 0.0 yields position2D, 1.0 yields position3D.
 *
 * @returns {vec4} The interpolated position, with w = 1.0.
 */
vec4 czm_columbusViewMorph(vec3 position2D, vec3 position3D, float time)
{
// Just linear for now.
vec3 p = mix(position2D, position3D, time);
return vec4(p, 1.0);
}
///////////////////////////////////////////////////////////////////////////////
/**
 * A ray: an origin point plus a direction vector.
 *
 * @name czm_ray
 * @glslStruct
 */
struct czm_ray
{
vec3 origin;    // starting point of the ray
vec3 direction; // direction of the ray
};
/**
 * Computes the point along a ray at the given time.  <code>time</code> can be
 * positive, negative, or zero.
 *
 * @name czm_pointAlongRay
 * @glslFunction
 *
 * @param {czm_ray} ray The ray to compute the point along.
 * @param {float} time The time along the ray.
 *
 * @returns {vec3} The point along the ray at the given time.
 *
 * @example
 * czm_ray ray = czm_ray(vec3(0.0), vec3(1.0, 0.0, 0.0)); // origin, direction
 * vec3 v = czm_pointAlongRay(ray, 2.0); // (2.0, 0.0, 0.0)
 */
vec3 czm_pointAlongRay(czm_ray ray, float time)
{
    // origin + t * direction
    return ray.origin + (ray.direction * time);
}
///////////////////////////////////////////////////////////////////////////////
/**
 * A time interval along a ray, bounded by start and stop times.
 *
 * @name czm_raySegment
 * @glslStruct
 */
struct czm_raySegment
{
float start; // interval start time
float stop;  // interval stop time
};
/**
 * The empty interval; both bounds are -czm_infinity, so {@link czm_isEmpty}
 * returns <code>true</code> for it.
 *
 * @name czm_emptyRaySegment
 * @glslConstant
 */
const czm_raySegment czm_emptyRaySegment = czm_raySegment(-czm_infinity, -czm_infinity);
/**
 * The full interval [0.0, czm_infinity].
 *
 * @name czm_fullRaySegment
 * @glslConstant
 */
const czm_raySegment czm_fullRaySegment = czm_raySegment(0.0, czm_infinity);
/**
 * Determines if a time interval is empty.
 *
 * @name czm_isEmpty
 * @glslFunction
 *
 * @param {czm_raySegment} interval The interval to test.
 *
 * @returns {bool} <code>true</code> if the time interval is empty; otherwise, <code>false</code>.
 *
 * @example
 * bool b0 = czm_isEmpty(czm_emptyRaySegment); // true
 * bool b1 = czm_isEmpty(czm_raySegment(0.0, 1.0)); // false
 * bool b2 = czm_isEmpty(czm_raySegment(1.0, 1.0)); // false, contains 1.0.
 */
bool czm_isEmpty(czm_raySegment interval)
{
// Empty intervals are marked by a negative stop time
// (czm_emptyRaySegment uses -czm_infinity for both bounds).
return (interval.stop < 0.0);
}
/**
 * Determines if a time interval is the full interval [0.0, czm_infinity].
 *
 * @name czm_isFull
 * @glslFunction
 *
 * @param {czm_raySegment} interval The interval to test.
 *
 * @returns {bool} <code>true</code> if the time interval is full; otherwise, <code>false</code>.
 *
 * @example
 * bool b0 = czm_isFull(czm_fullRaySegment); // true
 * bool b1 = czm_isFull(czm_raySegment(0.0, 1.0)); // false
 */
bool czm_isFull(czm_raySegment interval)
{
return (interval.start == 0.0 && interval.stop == czm_infinity);
}
///////////////////////////////////////////////////////////////////////////////
/**
 * An ellipsoid: a center plus per-axis radii, with precomputed reciprocals
 * of the radii and of the squared radii.
 *
 * @name czm_ellipsoid
 * @glslStruct
 */
struct czm_ellipsoid
{
vec3 center;              // center of the ellipsoid
vec3 radii;               // per-axis radii
vec3 inverseRadii;        // 1.0 / radii, per axis
vec3 inverseRadiiSquared; // 1.0 / (radii * radii), per axis
};
/**
 * Constructs a czm_ellipsoid from a center and per-axis radii, precomputing
 * the inverse radii and inverse squared radii.
 *
 * @name czm_ellipsoidNew
 * @glslFunction
 *
 */
czm_ellipsoid czm_ellipsoidNew(vec3 center, vec3 radii)
{
vec3 inverseRadii = vec3(1.0 / radii.x, 1.0 / radii.y, 1.0 / radii.z);
vec3 inverseRadiiSquared = inverseRadii * inverseRadii;
czm_ellipsoid temp = czm_ellipsoid(center, radii, inverseRadii, inverseRadiiSquared);
return temp;
}
/**
 * Determines whether a point is inside (or on) an ellipsoid.
 * NOTE(review): the point is transformed by czm_inverseModelView, so it
 * appears to be expected in eye coordinates while the ellipsoid radii are in
 * model coordinates; the ellipsoid's center field is not used here — confirm
 * against callers.
 *
 * @name czm_ellipsoidContainsPoint
 * @glslFunction
 *
 */
bool czm_ellipsoidContainsPoint(czm_ellipsoid ellipsoid, vec3 point)
{
// Scale into the ellipsoid's unit-sphere space; inside iff |scaled| <= 1.
vec3 scaled = ellipsoid.inverseRadii * (czm_inverseModelView * vec4(point, 1.0)).xyz;
return (dot(scaled, scaled) <= 1.0);
}
/**
 * Computes the interval of ray parameters over which a ray intersects an
 * ellipsoid.  Works in the ellipsoid's unit-sphere space (positions scaled by
 * the inverse radii) and solves the quadratic |q + t*w|^2 = 1, branching on
 * whether the ray origin is outside, inside, or on the ellipsoid.  Returns
 * {@link czm_emptyRaySegment} when there is no intersection at t >= 0.
 *
 * @name czm_rayEllipsoidIntersectionInterval
 * @glslFunction
 */
czm_raySegment czm_rayEllipsoidIntersectionInterval(czm_ray ray, czm_ellipsoid ellipsoid)
{
// ray and ellipsoid center in eye coordinates. radii in model coordinates.
vec3 q = ellipsoid.inverseRadii * (czm_inverseModelView * vec4(ray.origin, 1.0)).xyz;
vec3 w = ellipsoid.inverseRadii * (czm_inverseModelView * vec4(ray.direction, 0.0)).xyz;
// Make q the origin relative to the ellipsoid center, in unit-sphere space.
q = q - ellipsoid.inverseRadii * (czm_inverseModelView * vec4(ellipsoid.center, 1.0)).xyz;
float q2 = dot(q, q);
float qw = dot(q, w);
if (q2 > 1.0) // Outside ellipsoid.
{
if (qw >= 0.0) // Looking outward or tangent (0 intersections).
{
return czm_emptyRaySegment;
}
else // qw < 0.0.
{
float qw2 = qw * qw;
float difference = q2 - 1.0; // Positively valued.
float w2 = dot(w, w);
float product = w2 * difference;
if (qw2 < product) // Imaginary roots (0 intersections).
{
return czm_emptyRaySegment;
}
else if (qw2 > product) // Distinct roots (2 intersections).
{
float discriminant = qw * qw - product;
float temp = -qw + sqrt(discriminant); // Avoid cancellation.
float root0 = temp / w2;
float root1 = difference / temp;
if (root0 < root1)
{
czm_raySegment i = czm_raySegment(root0, root1);
return i;
}
else
{
czm_raySegment i = czm_raySegment(root1, root0);
return i;
}
}
else // qw2 == product. Repeated roots (2 intersections).
{
float root = sqrt(difference / w2);
czm_raySegment i = czm_raySegment(root, root);
return i;
}
}
}
else if (q2 < 1.0) // Inside ellipsoid (2 intersections).
{
float difference = q2 - 1.0; // Negatively valued.
float w2 = dot(w, w);
float product = w2 * difference; // Negatively valued.
float discriminant = qw * qw - product;
float temp = -qw + sqrt(discriminant); // Positively valued.
czm_raySegment i = czm_raySegment(0.0, temp / w2);
return i;
}
else // q2 == 1.0. On ellipsoid.
{
if (qw < 0.0) // Looking inward.
{
float w2 = dot(w, w);
czm_raySegment i = czm_raySegment(0.0, -qw / w2);
return i;
}
else // qw >= 0.0. Looking outward or tangent.
{
return czm_emptyRaySegment;
}
}
}
/**
 * Returns the WGS84 ellipsoid, with its center at the origin of world coordinates, in eye coordinates.
 *
 * @name czm_getWgs84EllipsoidEC
 * @glslFunction
 *
 * @returns {czm_ellipsoid} The WGS84 ellipsoid, with its center at the origin of world coordinates, in eye coordinates.
 *
 * @see Ellipsoid.getWgs84
 *
 * @example
 * czm_ellipsoid ellipsoid = czm_getWgs84EllipsoidEC();
 */
czm_ellipsoid czm_getWgs84EllipsoidEC()
{
// The view matrix's translation column is the world origin (the ellipsoid
// center) expressed in eye coordinates.
return czm_ellipsoidNew(
czm_view[3].xyz, // center
vec3(6378137.0, 6378137.0, 6356752.314245)); // radii
}
/**
 * Computes the fraction of a Web Mercator extent at which a given geodetic latitude is located.
 *
 * @name czm_latitudeToWebMercatorFraction
 * @glslFunction
 *
 * @param {float} latitude The geodetic latitude, in radians.
 * @param {float} southMercatorYLow The low portion of the Web Mercator coordinate of the southern boundary of the extent.
 * @param {float} southMercatorYHigh The high portion of the Web Mercator coordinate of the southern boundary of the extent.
 * @param {float} oneOverMercatorHeight The reciprocal of the total height of the extent in Web Mercator coordinates.
 *
 * @returns {float} The fraction of the extent at which the latitude occurs. If the latitude is the southern
 * boundary of the extent, the return value will be zero. If it is the northern boundary, the return
 * value will be 1.0. Latitudes in between are mapped according to the Web Mercator projection.
 */
float czm_latitudeToWebMercatorFraction(float latitude, float southMercatorYLow, float southMercatorYHigh, float oneOverMercatorHeight)
{
float sinLatitude = sin(latitude);
// Web Mercator forward projection: y = 0.5 * ln((1 + sin(lat)) / (1 - sin(lat))).
float mercatorY = 0.5 * log((1.0 + sinLatitude) / (1.0 - sinLatitude));
// mercatorY - southMercatorY in simulated double precision.
// The south coordinate is split into high/low parts; t1/e/t2 appear to form
// a compensated (two-sum style) subtraction to preserve precision.
float t1 = 0.0 - southMercatorYLow;
float e = t1 - 0.0;
float t2 = ((-southMercatorYLow - e) + (0.0 - (t1 - e))) + mercatorY - southMercatorYHigh;
float highDifference = t1 + t2;
float lowDifference = t2 - (highDifference - t1);
// Scale each part separately before summing to keep the low bits meaningful.
return highDifference * oneOverMercatorHeight + lowDifference * oneOverMercatorHeight;
}
/**
 * Translates a position (or any <code>vec3</code>) that was encoded with {@link EncodedCartesian3},
 * and then provided to the shader as separate <code>high</code> and <code>low</code> bits to
 * be relative to the eye. As shown in the example, the position can then be transformed in eye
 * or clip coordinates using {@link czm_modelViewRelativeToEye} or {@link czm_modelViewProjectionRelativeToEye},
 * respectively.
 * <p>
 * This technique, called GPU RTE, eliminates jittering artifacts when using large coordinates as
 * described in <a href="http://blogs.agi.com/insight3d/index.php/2008/09/03/precisions-precisions/">Precisions, Precisions</a>.
 * </p>
 *
 * @name czm_translateRelativeToEye
 * @glslFunction
 *
 * @param {vec3} high The position's high bits.
 * @param {vec3} low The position's low bits.
 * @returns {vec3} The position translated to be relative to the camera's position.
 *
 * @example
 * attribute vec3 positionHigh;
 * attribute vec3 positionLow;
 *
 * void main()
 * {
 * vec3 p = czm_translateRelativeToEye(positionHigh, positionLow);
 * gl_Position = czm_modelViewProjectionRelativeToEye * vec4(p, 1.0);
 * }
 *
 * @see czm_modelViewRelativeToEye
 * @see czm_modelViewProjectionRelativeToEye
 * @see EncodedCartesian3
 */
vec3 czm_translateRelativeToEye(vec3 high, vec3 low)
{
// Subtract the high and low parts separately before summing so the small
// differences are not swamped by the large magnitudes.
vec3 highDifference = high - czm_encodedCameraPositionMCHigh;
vec3 lowDifference = low - czm_encodedCameraPositionMCLow;
return highDifference + lowDifference;
}
/**
 * Samples a normal map four times with time-animated, rotated texture-space
 * offsets and averages the samples to produce water-surface noise scaled to
 * the range [-1, 1].
 *
 * @private
 */
vec4 czm_getWaterNoise(sampler2D normalMap, vec2 uv, float time, float angleInRadians)
{
float cosAngle = cos(angleInRadians);
float sinAngle = sin(angleInRadians);
// time dependent sampling directions
vec2 s0 = vec2(1.0/17.0, 0.0);
vec2 s1 = vec2(-1.0/29.0, 0.0);
vec2 s2 = vec2(1.0/101.0, 1.0/59.0);
vec2 s3 = vec2(-1.0/109.0, -1.0/57.0);
// rotate sampling direction by specified angle
s0 = vec2((cosAngle * s0.x) - (sinAngle * s0.y), (sinAngle * s0.x) + (cosAngle * s0.y));
s1 = vec2((cosAngle * s1.x) - (sinAngle * s1.y), (sinAngle * s1.x) + (cosAngle * s1.y));
s2 = vec2((cosAngle * s2.x) - (sinAngle * s2.y), (sinAngle * s2.x) + (cosAngle * s2.y));
s3 = vec2((cosAngle * s3.x) - (sinAngle * s3.y), (sinAngle * s3.x) + (cosAngle * s3.y));
// Four differently-scaled, differently-offset UV sets, scrolled over time.
vec2 uv0 = (uv/103.0) + (time * s0);
vec2 uv1 = uv/107.0 + (time * s1) + vec2(0.23);
vec2 uv2 = uv/vec2(897.0, 983.0) + (time * s2) + vec2(0.51);
vec2 uv3 = uv/vec2(991.0, 877.0) + (time * s3) + vec2(0.71);
uv0 = fract(uv0);
uv1 = fract(uv1);
uv2 = fract(uv2);
uv3 = fract(uv3);
vec4 noise = (texture2D(normalMap, uv0)) +
(texture2D(normalMap, uv1)) +
(texture2D(normalMap, uv2)) +
(texture2D(normalMap, uv3));
// average and scale to between -1 and 1
return ((noise / 4.0) - 0.5) * 2.0;
}