diff --git a/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts b/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts index ae496df275d5..ce57a06027b5 100644 --- a/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts +++ b/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts @@ -20,6 +20,7 @@ import { Scene } from "../../../scene"; import { Constants } from "../../../Engines/constants"; import { _TypeStore } from '../../../Misc/typeStore'; import { MotionBlurPostProcess } from "../../motionBlurPostProcess"; +import { ScreenSpaceReflectionPostProcess } from "../../screenSpaceReflectionPostProcess"; declare type Animation = import("../../../Animations/animation").Animation; @@ -77,10 +78,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Post-process used to merge the volumetric light effect and the real scene color */ public volumetricLightMergePostProces: Nullable = null; - /** - * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose) - */ - public volumetricLightFinalPostProcess: Nullable = null; /** * Base post-process used to calculate the average luminance of the final image for HDR @@ -100,14 +97,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Post-process used to store the final texture adder post-process (attach/detach for debug purpose) */ public textureAdderFinalPostProcess: Nullable = null; - /** - * Post-process used to store the final lens flare post-process (attach/detach for debug purpose) - */ - public lensFlareFinalPostProcess: Nullable = null; - /** - * Post-process used to merge the final HDR post-process and the real scene color - */ - public hdrFinalPostProcess: Nullable = null; /** * Post-process used to create a lens flare effect */ @@ -129,6 +118,9 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme */ public 
fxaaPostProcess: Nullable = null; + public screenSpaceReflectionPostProcess: Nullable = null; + public screenSpaceReflectionMergePostProcess: Nullable = null; + // Values /** @@ -319,6 +311,27 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } } + /** + * Gets how much the reflections are blurred. + */ + @serialize() + public screenSpaceReflectionBlurWidth: number = 8; + + /** + * Gets whether or not the screen space reflection results should be blurred. + */ + @serialize() + public get blurScreenSpaceReflection(): boolean { + return this._blurScreenSpaceReflection; + } + + public set blurScreenSpaceReflection(value: boolean) { + if (this._blurScreenSpaceReflection !== value) { + this._blurScreenSpaceReflection = value; + this._buildPipeline(); + } + } + /** * List of animations for the pipeline (IAnimatable implementation) */ @@ -328,7 +341,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Private members */ private _scene: Scene; - private _currentDepthOfFieldSource: Nullable = null; + private _currentFinalColorSource: Nullable = null; private _basePostProcess: Nullable; private _fixedExposure: number = 1.0; @@ -337,6 +350,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme private _hdrCurrentLuminance: number = 1.0; private _motionStrength: number = 1.0; private _isObjectBasedMotionBlur: boolean = false; + private _blurScreenSpaceReflection: boolean = false; private _floatTextureType: number; @@ -353,6 +367,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme private _hdrEnabled: boolean = false; private _motionBlurEnabled: boolean = false; private _fxaaEnabled: boolean = false; + private _screenSpaceReflectionEnabled: boolean = false; private _motionBlurSamples: number = 64.0; private _volumetricLightStepsCount: number = 50.0; @@ -491,6 +506,23 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme
this._buildPipeline(); } + /** + * Specifies if screen space reflections is enabled. + */ + @serialize() + public get screenSpaceReflectionEnabled(): boolean { + return this._screenSpaceReflectionEnabled; + } + + public set screenSpaceReflectionEnabled(enabled: boolean) { + if (this._screenSpaceReflectionEnabled === enabled) { + return; + } + + this._screenSpaceReflectionEnabled = enabled; + this._buildPipeline(); + } + /** * Specifies the number of steps used to calculate the volumetric lights * Typically in interval [50, 200] @@ -590,18 +622,22 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (!this._basePostProcess) { this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Constants.TEXTURE_BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType); this.originalPostProcess.onApply = () => { - this._currentDepthOfFieldSource = this.originalPostProcess; + this._currentFinalColorSource = this.originalPostProcess; }; } else { this.originalPostProcess = this._basePostProcess; } - if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled) { + if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled || this._screenSpaceReflectionEnabled) { this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true)); } - this._currentDepthOfFieldSource = this.originalPostProcess; + this._currentFinalColorSource = this.originalPostProcess; + + if (this._screenSpaceReflectionEnabled) { + this._createScreenSpaceReflectionPostProcess(scene, ratio); + } if (this._bloomEnabled) { // Create down sample X4 post-process @@ -611,32 +647,24 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this._createBrightPassPostProcess(scene, ratio / 2); // 
Create gaussian blur post-processes (down sampling blurs) - this._createBlurPostProcesses(scene, ratio / 4, 1); + this._createBlurPostProcesses(scene, ratio / 4); // Create texture adder post-process this._createTextureAdderPostProcess(scene, ratio); // Create depth-of-field source post-process - this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); + this.textureAdderFinalPostProcess = new PostProcess("HDRDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRBaseDepthOfFieldSource", () => { return this.textureAdderFinalPostProcess; }, true)); } if (this._vlsEnabled) { // Create volumetric light this._createVolumetricLightPostProcess(scene, ratio); - - // Create volumetric light final post-process - this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true)); } if (this._lensFlareEnabled) { // Create lens flare post-process this._createLensFlarePostProcess(scene, ratio); - - // Create depth-of-field source post-process post lens-flare and disable it now - this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return 
this.lensFlareFinalPostProcess; }, true)); } if (this._hdrEnabled) { @@ -645,15 +673,11 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Create HDR this._createHdrPostProcess(scene, ratio); - - // Create depth-of-field source post-process post hdr and disable it now - this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true)); } if (this._depthOfFieldEnabled) { // Create gaussian blur used by depth-of-field - this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth"); + this._createBlurPostProcesses(scene, ratio / 2, "depthOfFieldBlurWidth"); // Create depth-of-field post-process this._createDepthOfFieldPostProcess(scene, ratio); @@ -666,7 +690,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this._fxaaEnabled) { // Create fxaa post-process - this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT); + this.fxaaPostProcess = new FxaaPostProcess("fxaa", 1.0, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRFxaa", () => { return this.fxaaPostProcess; }, true)); } @@ -682,7 +706,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Down Sample X4 Post-Processs private _createDownSampleX4PostProcess(scene: Scene, ratio: number): void { var downSampleX4Offsets = new Array(32); - this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, 
scene.getEngine(), false, "#define DOWN_SAMPLE_X4", Constants.TEXTURETYPE_UNSIGNED_INT); + this.downSampleX4PostProcess = new PostProcess("HDRDownSampleX4", "standard", ["dsOffsets"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DOWN_SAMPLE_X4", this._floatTextureType); this.downSampleX4PostProcess.onApply = (effect: Effect) => { var id = 0; @@ -707,7 +731,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Brightpass Post-Process private _createBrightPassPostProcess(scene: Scene, ratio: number): void { var brightOffsets = new Array(8); - this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", Constants.TEXTURETYPE_UNSIGNED_INT); + this.brightPassPostProcess = new PostProcess("HDRBrightPass", "standard", ["dsOffsets", "brightThreshold"], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define BRIGHT_PASS", this._floatTextureType); this.brightPassPostProcess.onApply = (effect: Effect) => { var sU = (1.0 / (this.brightPassPostProcess).width); @@ -731,11 +755,12 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } // Create blur H&V post-processes - private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void { + private _createBlurPostProcesses(scene: Scene, ratio: number, blurWidthKey: string = "blurWidth"): void { + var indice = this.blurHPostProcesses.length; var engine = scene.getEngine(); - var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT); - var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (this)[blurWidthKey], ratio, null, 
Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT); + var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); + var blurY = new BlurPostProcess("HDRBlurV" + "_" + indice, new Vector2(0, 1), (this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); blurX.onActivateObservable.add(() => { let dw = blurX.width / engine.getRenderWidth(); @@ -756,20 +781,50 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Create texture adder post-process private _createTextureAdderPostProcess(scene: Scene, ratio: number): void { - this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Constants.TEXTURETYPE_UNSIGNED_INT); + this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", this._floatTextureType); this.textureAdderPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? 
this._currentDepthOfFieldSource : this.originalPostProcess); + // effect.setTextureFromPostProcess("otherSampler", this._currentFinalColorSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("lensSampler", this.lensTexture); effect.setFloat("exposure", this._currentExposure); - this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess; + this._currentFinalColorSource = this.textureAdderPostProcess; }; // Add to pipeline this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true)); } + // Creates the screen space reflection post-process + private _createScreenSpaceReflectionPostProcess(scene: Scene, ratio: number): void { + // Base post-process + this.screenSpaceReflectionPostProcess = new ScreenSpaceReflectionPostProcess("HDRScreenSpaceReflection", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); + if (!this.screenSpaceReflectionPostProcess._geometryBufferRenderer || !this.screenSpaceReflectionPostProcess._geometryBufferRenderer.isSupported) { + return; + } + + this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRScreenSpaceReflection", () => { return this.screenSpaceReflectionPostProcess; }, true)); + + // Smooth + if (this._blurScreenSpaceReflection) { + this._createBlurPostProcesses(scene, ratio, "screenSpaceReflectionBlurWidth"); + } + + // Merge + this.screenSpaceReflectionMergePostProcess = new PostProcess( + "HDRScreenSpaceReflectionMerge", "standard", [], ["otherSampler", "roughnessSampler"], ratio, null, + Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define SSR_MERGE", this._floatTextureType + ); + this.screenSpaceReflectionMergePostProcess.onApply = (effect: Effect) => { + var gRenderer = this.screenSpaceReflectionPostProcess && this.screenSpaceReflectionPostProcess._geometryBufferRenderer; + gRenderer && 
effect.setTexture("roughnessSampler", gRenderer.getGBuffer().textures[gRenderer.getTextureIndex(GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE)]); + effect.setTextureFromPostProcessOutput("otherSampler", this.originalPostProcess); + this._currentFinalColorSource = this.screenSpaceReflectionMergePostProcess; + }; + this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRScreenSpaceReflectionMerge", () => { return this.screenSpaceReflectionMergePostProcess; }, true)); + } + private _createVolumetricLightPostProcess(scene: Scene, ratio: number): void { var geometryRenderer = scene.enableGeometryBufferRenderer(); geometryRenderer.enablePosition = true; @@ -813,15 +868,15 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true)); // Smooth - this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale"); + this._createBlurPostProcesses(scene, ratio / 4, "volumetricLightBlurScale"); // Merge this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE"); this.volumetricLightMergePostProces.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? 
this.textureAdderFinalPostProcess : this.originalPostProcess); + effect.setTextureFromPostProcessOutput("originalSampler", this._currentFinalColorSource); - this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess; + this._currentFinalColorSource = this.volumetricLightMergePostProces; }; this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true)); @@ -914,14 +969,14 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this._hdrAutoExposure) { defines.push("#define AUTO_EXPOSURE"); } - this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines.join("\n"), Constants.TEXTURETYPE_UNSIGNED_INT); + this.hdrPostProcess = new PostProcess("HDR", "standard", ["averageLuminance"], ["textureAdderSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, defines.join("\n"), this._floatTextureType); var outputLiminance = 1; var time = 0; var lastTime = 0; this.hdrPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("textureAdderSampler", this._currentFinalColorSource); time += scene.getEngine().getDeltaTime(); @@ -950,7 +1005,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme lastTime = time; - this._currentDepthOfFieldSource = this.hdrFinalPostProcess; + this._currentFinalColorSource = this.hdrPostProcess; }; this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true)); @@ -958,19 +1013,19 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Create lens flare post-process private _createLensFlarePostProcess(scene: Scene, ratio: number): void { - 
this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Constants.TEXTURETYPE_UNSIGNED_INT); + this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", this._floatTextureType); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true)); - this._createBlurPostProcesses(scene, ratio / 4, 2, "lensFlareBlurWidth"); + this._createBlurPostProcesses(scene, ratio / 4, "lensFlareBlurWidth"); - this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Constants.TEXTURETYPE_UNSIGNED_INT); + this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", this._floatTextureType); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true)); var resolution = new Vector2(0, 0); // Lens flare this.lensFlarePostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess); + effect.setTextureFromPostProcessOutput("textureSampler", this._bloomEnabled ? 
this.blurHPostProcesses[0] : this._currentFinalColorSource); effect.setTexture("lensColorSampler", this.lensColorTexture); effect.setFloat("strength", this.lensFlareStrength); effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal); @@ -1004,7 +1059,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme return; } - effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture); effect.setTexture("lensStarSampler", this.lensStarTexture); @@ -1025,15 +1080,15 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme effect.setMatrix("lensStarMatrix", lensStarMatrix); - this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess; + this._currentFinalColorSource = this.lensFlareComposePostProcess; }; } // Create depth-of-field post-process private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void { - this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Constants.TEXTURETYPE_UNSIGNED_INT); + this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", this._floatTextureType); this.depthOfFieldPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("depthSampler", this._getDepthTexture()); effect.setFloat("distance", this.depthOfFieldDistance); @@ -1046,7 +1101,7 @@ export class StandardRenderingPipeline extends 
PostProcessRenderPipeline impleme // Create motion blur post-process private _createMotionBlurPostProcess(scene: Scene, ratio: number): void { if (this._isObjectBasedMotionBlur) { - const mb = new MotionBlurPostProcess("HDRMotionBlur", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT); + const mb = new MotionBlurPostProcess("HDRMotionBlur", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); mb.motionStrength = this.motionStrength; mb.motionBlurSamples = this.motionBlurSamples; this.motionBlurPostProcess = mb; @@ -1054,7 +1109,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.motionBlurPostProcess = new PostProcess("HDRMotionBlur", "standard", ["inverseViewProjection", "prevViewProjection", "screenSize", "motionScale", "motionStrength"], ["depthSampler"], - ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), Constants.TEXTURETYPE_UNSIGNED_INT); + ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MOTION_BLUR\n#define MAX_MOTION_SAMPLES " + this.motionBlurSamples.toFixed(1), this._floatTextureType); var motionScale: number = 0; var prevViewProjection = Matrix.Identity(); @@ -1101,6 +1156,9 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); } + if (this.screenSpaceReflectionPostProcess) { this.screenSpaceReflectionPostProcess.dispose(camera); } + if (this.screenSpaceReflectionMergePostProcess) { this.screenSpaceReflectionMergePostProcess.dispose(camera); } + if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); } if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); } if (this.textureAdderPostProcess) { 
this.textureAdderPostProcess.dispose(camera); } @@ -1110,7 +1168,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); } if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); } if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); } - if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); } if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); } if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); } @@ -1121,7 +1178,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); } if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); } - if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); } if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); } @@ -1147,15 +1203,15 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.volumetricLightSmoothXPostProcess = null; this.volumetricLightSmoothYPostProcess = null; this.volumetricLightMergePostProces = null; - this.volumetricLightFinalPostProcess = null; this.lensFlarePostProcess = null; this.lensFlareComposePostProcess = null; this.luminancePostProcess = null; this.hdrPostProcess = null; - this.hdrFinalPostProcess = null; this.depthOfFieldPostProcess = null; this.motionBlurPostProcess = null; this.fxaaPostProcess = null; + this.screenSpaceReflectionPostProcess = null; + this.screenSpaceReflectionMergePostProcess = null; this.luminanceDownSamplePostProcesses = []; this.blurHPostProcesses = []; @@ -1174,8 +1230,8 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } /** - * Serialize 
the rendering pipeline (Used when exporting) - * @returns the serialized object + * Serialize the rendering pipeline (Used when exporting). + * @returns the serialized object. */ public serialize(): any { var serializationObject = SerializationHelper.Serialize(this); @@ -1184,6 +1240,10 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme serializationObject.sourceLightId = this.sourceLight.id; } + if (this.screenSpaceReflectionPostProcess) { + serializationObject.screenSpaceReflectionPostProcess = SerializationHelper.Serialize(this.screenSpaceReflectionPostProcess); + } + serializationObject.customType = "StandardRenderingPipeline"; return serializationObject; @@ -1203,6 +1263,10 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme p.sourceLight = scene.getLightByID(source.sourceLightId); } + if (source.screenSpaceReflectionPostProcess) { + SerializationHelper.Parse(() => p.screenSpaceReflectionPostProcess, source.screenSpaceReflectionPostProcess, scene, rootUrl); + } + return p; } diff --git a/src/PostProcesses/index.ts b/src/PostProcesses/index.ts index 76814de4ec40..b6ea35c11ed6 100644 --- a/src/PostProcesses/index.ts +++ b/src/PostProcesses/index.ts @@ -28,4 +28,5 @@ export * from "./stereoscopicInterlacePostProcess"; export * from "./tonemapPostProcess"; export * from "./volumetricLightScatteringPostProcess"; export * from "./vrDistortionCorrectionPostProcess"; -export * from "./vrMultiviewToSingleviewPostProcess"; \ No newline at end of file +export * from "./vrMultiviewToSingleviewPostProcess"; +export * from "./screenSpaceReflectionPostProcess"; \ No newline at end of file diff --git a/src/PostProcesses/screenSpaceReflectionPostProcess.ts b/src/PostProcesses/screenSpaceReflectionPostProcess.ts new file mode 100644 index 000000000000..332c0ddb3cac --- /dev/null +++ b/src/PostProcesses/screenSpaceReflectionPostProcess.ts @@ -0,0 +1,195 @@ +import { Nullable } from "../types"; +import { Camera } 
from "../Cameras/camera"; +import { Effect } from "../Materials/effect"; +import { PostProcess, PostProcessOptions } from "./postProcess"; +import { Constants } from "../Engines/constants"; +import { Scene } from '../scene'; +import { GeometryBufferRenderer } from '../Rendering/geometryBufferRenderer'; +import { serialize } from '../Misc/decorators'; + +import "../Shaders/screenSpaceReflection.fragment"; + +declare type Engine = import("../Engines/engine").Engine; +/** + * The ScreenSpaceReflectionPostProcess performs realtime reflections using only the information available on the screen (positions and normals). + * Basically, the screen space reflection post-process will compute reflections according to the material's roughness. + */ +export class ScreenSpaceReflectionPostProcess extends PostProcess { + /** + * Gets or sets a reflection threshold mainly used to adjust the reflection's height. + */ + @serialize() + public threshold: number = 0; + /** + * Gets or sets the current reflection strength. 1.0 is an ideal value but can be increased/decreased for particular results. + */ + @serialize() + public strength: number = 1; + /** + * Gets or sets the falloff exponent used while computing fresnel. The higher the exponent, the more discrete the reflections will be. + */ + @serialize() + public reflectionSpecularFalloffExponent: number = 1; + /** + * Gets or sets the step size used to iterate until the effect finds the color of the reflection's pixel. Typically in interval [0.1, 1.0] + */ + @serialize() + public step: number = 0.2; + + /** + * @hidden + */ + public _geometryBufferRenderer: Nullable; + + private _enableSmoothReflections: boolean = true; + private _reflectionSamples: number = 64; + private _smoothSteps: number = 5; + + /** + * Creates a new instance of ScreenSpaceReflectionPostProcess + * @param name The name of the effect. + * @param scene The scene containing the objects to calculate reflections.
+ * @param options The required width/height ratio to downsize to before computing the render pass. + * @param camera The camera to apply the render pass to. + * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) + * @param engine The engine which the post process will be applied. (default: current engine) + * @param reusable If the post process can be reused on the same frame. (default: false) + * @param textureType Type of textures used when performing the post process. (default: 0) + * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) + */ + constructor(name: string, scene: Scene, options: number | PostProcessOptions, camera: Nullable, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Constants.TEXTURETYPE_UNSIGNED_INT, blockCompilation = false) { + super(name, "screenSpaceReflection", [ + "projection", "view", "threshold", "reflectionSpecularFalloffExponent", "strength", "step" + ], [ + "textureSampler", "normalSampler", "positionSampler", "roughnessSampler" + ], options, camera, samplingMode, engine, reusable, + "#define SSR_SUPPORTED\n#define REFLECTION_SAMPLES 64\n#define SMOOTH_STEPS 5\n", + textureType, undefined, null, blockCompilation); + + // Get geometry buffer renderer and update effect + const geometryBufferRenderer = scene.enableGeometryBufferRenderer(); + if (geometryBufferRenderer) { + if (geometryBufferRenderer.isSupported) { + geometryBufferRenderer.enablePosition = true; + geometryBufferRenderer.enableRoughness = true; + this._geometryBufferRenderer = geometryBufferRenderer; + } + } + + this._updateEffectDefines(); + + // On apply, send uniforms + this.onApply = (effect: Effect) => { + if (!geometryBufferRenderer) { + return; + } + + // Samplers + const positionIndex = 
geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.POSITION_TEXTURE_TYPE); + const roughnessIndex = geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE); + + effect.setTexture("normalSampler", geometryBufferRenderer.getGBuffer().textures[1]); + effect.setTexture("positionSampler", geometryBufferRenderer.getGBuffer().textures[positionIndex]); + effect.setTexture("roughnessSampler", geometryBufferRenderer.getGBuffer().textures[roughnessIndex]); + + // Uniforms + const camera = scene.activeCamera; + if (!camera) { + return; + } + + const viewMatrix = camera.getViewMatrix(); + const projectionMatrix = camera.getProjectionMatrix(); + + effect.setMatrix("projection", projectionMatrix); + effect.setMatrix("view", viewMatrix); + effect.setFloat("threshold", this.threshold); + effect.setFloat("reflectionSpecularFalloffExponent", this.reflectionSpecularFalloffExponent); + effect.setFloat("strength", this.strength); + effect.setFloat("step", this.step); + }; + } + + /** + * Gets whether or not smoothing reflections is enabled. + * Enabling smoothing will require more GPU power and can generate a drop in FPS. + */ + @serialize() + public get enableSmoothReflections(): boolean { + return this._enableSmoothReflections; + } + + /** + * Sets whether or not smoothing reflections is enabled. + * Enabling smoothing will require more GPU power and can generate a drop in FPS. + */ + public set enableSmoothReflections(enabled: boolean) { + if (enabled === this._enableSmoothReflections) { + return; + } + + this._enableSmoothReflections = enabled; + this._updateEffectDefines(); + } + + /** + * Gets the number of samples taken while computing reflections. The higher the samples count, + * the more GPU power the post-process will require and can generate a drop in FPS. + */ + @serialize() + public get reflectionSamples(): number { + return this._reflectionSamples; + } + + /** + * Sets the number of samples taken while computing reflections.
The higher the samples count, + * the more GPU power the post-process will require and can generate a drop in FPS. + */ + public set reflectionSamples(samples: number) { + if (samples === this._reflectionSamples) { + return; + } + + this._reflectionSamples = samples; + this._updateEffectDefines(); + } + + /** + * Gets the number of samples taken while smoothing reflections. The higher the samples count, + * the more GPU power the post-process will require and can generate a drop in FPS. + * The default value (5.0) works pretty well in all cases but can be adjusted. + */ + @serialize() + public get smoothSteps(): number { + return this._smoothSteps; + } + + /** + * Sets the number of samples taken while smoothing reflections. The higher the samples count, + * the more GPU power the post-process will require and can generate a drop in FPS. + * The default value (5.0) works pretty well in all cases but can be adjusted. + */ + public set smoothSteps(steps: number) { + if (steps === this._smoothSteps) { + return; + } + + this._smoothSteps = steps; + this._updateEffectDefines(); + } + + private _updateEffectDefines(): void { + const defines: string[] = []; + if (this._geometryBufferRenderer) { + defines.push("#define SSR_SUPPORTED"); + } + if (this._enableSmoothReflections) { + defines.push("#define ENABLE_SMOOTH_REFLECTIONS"); + } + + defines.push("#define REFLECTION_SAMPLES " + (this._reflectionSamples >> 0)); + defines.push("#define SMOOTH_STEPS " + (this._smoothSteps >> 0)); + + this.updateEffect(defines.join("\n")); + } +} diff --git a/src/Rendering/geometryBufferRenderer.ts b/src/Rendering/geometryBufferRenderer.ts index 486bfe554b41..b5841bf03b27 100644 --- a/src/Rendering/geometryBufferRenderer.ts +++ b/src/Rendering/geometryBufferRenderer.ts @@ -11,11 +11,13 @@ import { Material } from "../Materials/material"; import { MaterialHelper } from "../Materials/materialHelper"; import { Scene } from "../scene"; import { AbstractMesh } from "../Meshes/abstractMesh"; +import { Color4 } from 
'../Maths/math.color'; +import { StandardMaterial } from '../Materials/standardMaterial'; +import { PBRMaterial } from '../Materials/PBR/pbrMaterial'; import "../Shaders/geometry.fragment"; import "../Shaders/geometry.vertex"; import { _DevTools } from '../Misc/devTools'; -import { Color4 } from '../Maths/math.color'; /** @hidden */ interface ISavedTransformationMatrix { @@ -37,6 +39,11 @@ export class GeometryBufferRenderer { * using getIndex(GeometryBufferRenderer.VELOCITY_TEXTURE_INDEX) */ public static readonly VELOCITY_TEXTURE_TYPE = 2; + /** + * Constant used to retrieve the roughness texture index in the G-Buffer textures array + * using the getIndex(GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE) + */ + public static readonly ROUGHNESS_TEXTURE_TYPE = 3; /** * Dictionary used to store the previous transformation matrices of each rendered mesh @@ -61,9 +68,11 @@ export class GeometryBufferRenderer { private _ratio: number; private _enablePosition: boolean = false; private _enableVelocity: boolean = false; + private _enableRoughness: boolean = false; private _positionIndex: number = -1; private _velocityIndex: number = -1; + private _roughnessIndex: number = -1; protected _effect: Effect; protected _cachedDefines: string; @@ -92,6 +101,7 @@ export class GeometryBufferRenderer { switch (textureType) { case GeometryBufferRenderer.POSITION_TEXTURE_TYPE: return this._positionIndex; case GeometryBufferRenderer.VELOCITY_TEXTURE_TYPE: return this._velocityIndex; + case GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE: return this._roughnessIndex; default: return -1; } } @@ -133,6 +143,22 @@ export class GeometryBufferRenderer { this._createRenderTargets(); } + /** + * Gets a boolean indicating if objects roughness are enabled in the G buffer. + */ + public get enableRoughness(): boolean { + return this._enableRoughness; + } + + /** + * Sets whether or not objects roughness are enabled for the G buffer. 
+ */ + public set enableRoughness(enable: boolean) { + this._enableRoughness = enable; + this.dispose(); + this._createRenderTargets(); + } + /** * Gets the scene associated with the buffer. */ @@ -175,28 +201,49 @@ export class GeometryBufferRenderer { * @returns true if ready otherwise false */ public isReady(subMesh: SubMesh, useInstances: boolean): boolean { - var material: any = subMesh.getMaterial(); + var material = subMesh.getMaterial(); if (material && material.disableDepthWrite) { return false; } var defines = []; - var attribs = [VertexBuffer.PositionKind, VertexBuffer.NormalKind]; - var mesh = subMesh.getMesh(); // Alpha test - if (material && material.needAlphaTesting()) { - defines.push("#define ALPHATEST"); - if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) { - attribs.push(VertexBuffer.UVKind); - defines.push("#define UV1"); + if (material) { + let needUv = false; + if (material.needAlphaBlending()) { + defines.push("#define ALPHATEST"); + needUv = true; } - if (mesh.isVerticesDataPresent(VertexBuffer.UV2Kind)) { - attribs.push(VertexBuffer.UV2Kind); - defines.push("#define UV2"); + + if (material.bumpTexture && StandardMaterial.BumpTextureEnabled) { + defines.push("#define BUMP"); + needUv = true; + } + + if (this._enableRoughness) { + if (material instanceof StandardMaterial && material.specularTexture) { + defines.push("#define HAS_SPECULAR"); + needUv = true; + } else if (material instanceof PBRMaterial && material.reflectivityTexture) { + defines.push("#define HAS_REFLECTIVITY"); + needUv = true; + } + } + + if (needUv) { + defines.push("#define NEED_UV"); + if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) { + attribs.push(VertexBuffer.UVKind); + defines.push("#define UV1"); + } + if (mesh.isVerticesDataPresent(VertexBuffer.UV2Kind)) { + attribs.push(VertexBuffer.UV2Kind); + defines.push("#define UV2"); + } } } @@ -214,6 +261,11 @@ export class GeometryBufferRenderer { } } + if (this._enableRoughness) { + defines.push("#define 
ROUGHNESS"); + defines.push("#define ROUGHNESS_INDEX " + this._roughnessIndex); + } + // Bones if (mesh.useBones && mesh.computeBonesUsingShaders) { attribs.push(VertexBuffer.MatricesIndicesKind); @@ -257,10 +309,13 @@ export class GeometryBufferRenderer { this._cachedDefines = join; this._effect = this._scene.getEngine().createEffect("geometry", attribs, - ["world", "mBones", "viewProjection", "diffuseMatrix", "view", "previousWorld", "previousViewProjection", "mPreviousBones", "morphTargetInfluences"], - ["diffuseSampler"], join, + [ + "world", "mBones", "viewProjection", "diffuseMatrix", "view", "previousWorld", "previousViewProjection", "mPreviousBones", + "morphTargetInfluences", "bumpMatrix", "roughnessMatrix", "vTangentSpaceParams", "vBumpInfos" + ], + ["diffuseSampler", "bumpSampler", "roughnessSampler"], join, undefined, undefined, undefined, - { buffersCount: this._enablePosition ? 3 : 2, maxSimultaneousMorphTargets: numMorphInfluencers }); + { buffersCount: this._multiRenderTarget.textures.length - 1, maxSimultaneousMorphTargets: numMorphInfluencers }); } return this._effect.isReady(); @@ -309,6 +364,11 @@ export class GeometryBufferRenderer { count++; } + if (this._enableRoughness) { + this._roughnessIndex = count; + count++; + } + this._multiRenderTarget = new MultiRenderTarget("gBuffer", { width: engine.getRenderWidth() * this._ratio, height: engine.getRenderHeight() * this._ratio }, count, this._scene, { generateMipMaps: false, generateDepthTexture: true, defaultType: Constants.TEXTURETYPE_FLOAT }); @@ -331,7 +391,7 @@ export class GeometryBufferRenderer { var mesh = subMesh.getRenderingMesh(); var scene = this._scene; var engine = scene.getEngine(); - let material = subMesh.getMaterial(); + let material = subMesh.getMaterial(); if (!material) { return; @@ -371,13 +431,33 @@ export class GeometryBufferRenderer { this._effect.setMatrix("viewProjection", scene.getTransformMatrix()); this._effect.setMatrix("view", scene.getViewMatrix()); - // Alpha test 
- if (material && material.needAlphaTesting()) { - var alphaTexture = material.getAlphaTestTexture(); + if (material) { + // Alpha test + if (material.needAlphaTesting()) { + var alphaTexture = material.getAlphaTestTexture(); + if (alphaTexture) { + this._effect.setTexture("diffuseSampler", alphaTexture); + this._effect.setMatrix("diffuseMatrix", alphaTexture.getTextureMatrix()); + } + } + + // Bump + if (material.bumpTexture && scene.getEngine().getCaps().standardDerivatives && StandardMaterial.BumpTextureEnabled) { + this._effect.setFloat3("vBumpInfos", material.bumpTexture.coordinatesIndex, 1.0 / material.bumpTexture.level, material.parallaxScaleBias); + this._effect.setMatrix("bumpMatrix", material.bumpTexture.getTextureMatrix()); + this._effect.setTexture("bumpSampler", material.bumpTexture); + this._effect.setFloat2("vTangentSpaceParams", material.invertNormalMapX ? -1.0 : 1.0, material.invertNormalMapY ? -1.0 : 1.0); + } - if (alphaTexture) { - this._effect.setTexture("diffuseSampler", alphaTexture); - this._effect.setMatrix("diffuseMatrix", alphaTexture.getTextureMatrix()); + // Roughness + if (this._enableRoughness) { + if (material instanceof StandardMaterial && material.specularTexture) { + this._effect.setMatrix("roughnessMatrix", material.specularTexture.getTextureMatrix()); + this._effect.setTexture("roughnessSampler", material.specularTexture); + } else if (material instanceof PBRMaterial && material.reflectivityTexture) { + this._effect.setMatrix("roughnessMatrix", material.reflectivityTexture.getTextureMatrix()); + this._effect.setTexture("roughnessSampler", material.reflectivityTexture); + } } } diff --git a/src/Shaders/geometry.fragment.fx b/src/Shaders/geometry.fragment.fx index 77b584d303de..f8f33f351b12 100644 --- a/src/Shaders/geometry.fragment.fx +++ b/src/Shaders/geometry.fragment.fx @@ -1,13 +1,23 @@ #extension GL_EXT_draw_buffers : require +#if defined(BUMP) || !defined(NORMAL) +#extension GL_OES_standard_derivatives : enable +#endif + 
precision highp float; precision highp int; +#ifdef BUMP +varying mat4 vWorldView; +varying vec3 vNormalW; +#else varying vec3 vNormalV; +#endif + varying vec4 vViewPos; -#ifdef POSITION -varying vec3 vPosition; +#if defined(POSITION) || defined(BUMP) +varying vec3 vPositionW; #endif #ifdef VELOCITY @@ -15,26 +25,45 @@ varying vec4 vCurrentPosition; varying vec4 vPreviousPosition; #endif -#ifdef ALPHATEST +#ifdef NEED_UV varying vec2 vUV; +#endif + +#ifdef BUMP +uniform vec3 vBumpInfos; +uniform vec2 vTangentSpaceParams; +#endif + +#ifdef ROUGHNESS +varying vec2 vRoughnessUV; +uniform sampler2D roughnessSampler; +#endif + +#ifdef ALPHATEST uniform sampler2D diffuseSampler; #endif #include[RENDER_TARGET_COUNT] +#include void main() { -#ifdef ALPHATEST + #ifdef ALPHATEST if (texture2D(diffuseSampler, vUV).a < 0.4) discard; -#endif + #endif - gl_FragData[0] = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, 1.0); - //color0 = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, 1.0); + gl_FragData[0] = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, vViewPos.w); + + #ifdef BUMP + vec3 normalW = normalize(vNormalW); + #include + gl_FragData[1] = vec4(normalize(vec3(vWorldView * vec4(normalW, 0.0))), 1.0); + #else gl_FragData[1] = vec4(normalize(vNormalV), 1.0); - //color2 = vec4(vPositionV, 1.0); + #endif #ifdef POSITION - gl_FragData[POSITION_INDEX] = vec4(vPosition, 1.0); + gl_FragData[POSITION_INDEX] = vec4(vPositionW, 1.0); #endif #ifdef VELOCITY @@ -46,4 +75,18 @@ void main() { gl_FragData[VELOCITY_INDEX] = vec4(velocity, 0.0, 1.0); #endif + + #ifdef ROUGHNESS + #ifdef HAS_SPECULAR + // Specular + vec4 roughness = vec4(texture2D(roughnessSampler, vRoughnessUV).rgb, 1.0); + #elif HAS_REFLECTIVITY + // Reflectivity + vec4 roughness = vec4(texture2D(roughnessSampler, vRoughnessUV).rgb, 1.0); + #else + vec4 roughness = vec4(0.0, 0.0, 0.0, 1.0); + #endif + + gl_FragData[ROUGHNESS_INDEX] = roughness; + #endif } \ No newline at end of file diff --git a/src/Shaders/geometry.vertex.fx 
b/src/Shaders/geometry.vertex.fx index 954967706e9b..44d87f40cb19 100644 --- a/src/Shaders/geometry.vertex.fx +++ b/src/Shaders/geometry.vertex.fx @@ -11,26 +11,48 @@ precision highp int; attribute vec3 position; attribute vec3 normal; -#if defined(ALPHATEST) || defined(NEED_UV) -varying vec2 vUV; -uniform mat4 diffuseMatrix; -#ifdef UV1 -attribute vec2 uv; -#endif -#ifdef UV2 -attribute vec2 uv2; -#endif +#ifdef NEED_UV + varying vec2 vUV; + + #ifdef ALPHATEST + uniform mat4 diffuseMatrix; + #endif + #ifdef BUMP + uniform mat4 bumpMatrix; + varying vec2 vBumpUV; + #endif + #ifdef ROUGHNESS + uniform mat4 roughnessMatrix; + varying vec2 vRoughnessUV; + #endif + + #ifdef UV1 + attribute vec2 uv; + #endif + + #ifdef UV2 + attribute vec2 uv2; + #endif #endif // Uniform uniform mat4 viewProjection; uniform mat4 view; +#ifdef BUMP +varying mat4 vWorldView; +#endif + +#ifdef BUMP +varying vec3 vNormalW; +#else varying vec3 vNormalV; +#endif + varying vec4 vViewPos; -#ifdef POSITION -varying vec3 vPosition; +#if defined(POSITION) || defined(BUMP) +varying vec3 vPositionW; #endif #ifdef VELOCITY @@ -66,7 +88,13 @@ void main(void) #include vec4 pos = vec4(finalWorld * vec4(positionUpdated, 1.0)); + #ifdef BUMP + vWorldView = view * finalWorld; + vNormalW = normalUpdated; + #else vNormalV = normalize(vec3((view * finalWorld) * vec4(normalUpdated, 0.0))); + #endif + vViewPos = view * pos; #if defined(VELOCITY) && defined(BONES_VELOCITY_ENABLED) @@ -105,17 +133,41 @@ void main(void) #endif #ifdef POSITION - vPosition = pos.xyz / pos.w; + vPositionW = pos.xyz / pos.w; #endif gl_Position = viewProjection * finalWorld * vec4(positionUpdated, 1.0); -#if defined(ALPHATEST) || defined(BASIC_RENDER) -#ifdef UV1 - vUV = vec2(diffuseMatrix * vec4(uvUpdated, 1.0, 0.0)); -#endif -#ifdef UV2 - vUV = vec2(diffuseMatrix * vec4(uv2, 1.0, 0.0)); -#endif -#endif + #ifdef NEED_UV + #ifdef UV1 + #ifdef ALPHATEST + vUV = vec2(diffuseMatrix * vec4(uvUpdated, 1.0, 0.0)); + #else + vUV = uv; + 
#endif + + #ifdef BUMP + vBumpUV = vec2(bumpMatrix * vec4(uvUpdated, 1.0, 0.0)); + #endif + #ifdef ROUGHNESS + vRoughnessUV = vec2(roughnessMatrix * vec4(uvUpdated, 1.0, 0.0)); + #endif + #endif + #ifdef UV2 + #ifdef ALPHATEST + vUV = vec2(diffuseMatrix * vec4(uv2, 1.0, 0.0)); + #else + vUV = uv2; + #endif + + #ifdef BUMP + vBumpUV = vec2(bumpMatrix * vec4(uv2, 1.0, 0.0)); + #endif + #ifdef ROUGHNESS + vRoughnessUV = vec2(roughnessMatrix * vec4(uv2, 1.0, 0.0)); + #endif + #endif + #endif + + #include } diff --git a/src/Shaders/screenSpaceReflection.fragment.fx b/src/Shaders/screenSpaceReflection.fragment.fx new file mode 100644 index 000000000000..28fe6c7ef67f --- /dev/null +++ b/src/Shaders/screenSpaceReflection.fragment.fx @@ -0,0 +1,166 @@ +// Screen Space Reflection Post-Process based on the tutorial +// http://imanolfotia.com/blog/update/2017/03/11/ScreenSpaceReflections.html + +uniform sampler2D textureSampler; +uniform sampler2D normalSampler; +uniform sampler2D positionSampler; +uniform sampler2D roughnessSampler; + +uniform mat4 view; +uniform mat4 projection; + +uniform float step; +uniform float threshold; +uniform float strength; +uniform float reflectionSpecularFalloffExponent; + +// Varyings +varying vec2 vUV; + +// Constants +const float minRayStep = 0.001; + +struct ReflectionInfo { + vec3 color; + vec4 coords; +}; + +/** + * According to specular, see https://en.wikipedia.org/wiki/Schlick%27s_approximation + */ +vec3 fresnelSchlick(float cosTheta, vec3 F0) +{ + return F0 + (1.0 - F0) * pow(1.0 - cosTheta, 5.0); +} + +/** + * Once the pixel's coordinates has been found, let's adjust (smooth) a little bit + * by sampling multiple reflection pixels. 
+ */ +ReflectionInfo smoothReflectionInfo(vec3 dir, vec3 hitCoord) +{ + ReflectionInfo info; + info.color = vec3(0.0); + + vec4 projectedCoord; + float sampledDepth; + + for(int i = 0; i < SMOOTH_STEPS; i++) + { + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + sampledDepth = (view * texture2D(textureSampler, projectedCoord.xy)).z; + + float depth = sampledDepth - hitCoord.z; + + dir *= 0.5; + if(depth > 0.0) + hitCoord -= dir; + else + hitCoord += dir; + + info.color += texture2D(textureSampler, projectedCoord.xy).rgb; + } + + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + // Merge colors + info.coords = vec4(projectedCoord.xy, sampledDepth, 1.0); + info.color += texture2D(textureSampler, projectedCoord.xy).rgb; + info.color /= float(SMOOTH_STEPS + 1); + return info; +} + +/** + * Tests the given world position (hitCoord) according to the given reflection vector (dir) + * until it finds a collision (means that depth is enough close to say "it's the pixel to sample!"). 
+ */ +ReflectionInfo getReflectionInfo(vec3 dir, vec3 hitCoord) +{ + ReflectionInfo info; + vec4 projectedCoord; + float sampledDepth; + + dir *= step; + + for(int i = 0; i < REFLECTION_SAMPLES; i++) + { + hitCoord += dir; + + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + sampledDepth = (view * texture2D(positionSampler, projectedCoord.xy)).z; + + float depth = sampledDepth - hitCoord.z; + + if(((depth - dir.z) < threshold) && depth <= 0.0) + { + #ifdef ENABLE_SMOOTH_REFLECTIONS + return smoothReflectionInfo(dir, hitCoord); + #else + info.color = texture2D(textureSampler, projectedCoord.xy).rgb; + info.coords = vec4(projectedCoord.xy, sampledDepth, 0.0); + return info; + #endif + } + } + + info.color = texture2D(textureSampler, projectedCoord.xy).rgb; + info.coords = vec4(projectedCoord.xy, sampledDepth, 0.0); + return info; +} + +vec3 hash(vec3 a) +{ + #define Scale vec3(.8, .8, .8) + #define K 19.19 + + vec3 p3 = fract(a * Scale); + p3 += dot(p3, p3.yzx+K); + return fract((p3.xxy+p3.yzz)*p3.zyx); +} + +void main() +{ + #ifdef SSR_SUPPORTED + // Intensity + float spec = texture2D(roughnessSampler, vUV).r; + if (spec == 0.0) { + gl_FragColor = vec4(0.0, 0.0, 0.0, 0.0); + return; + } + + // Get coordinates of the pixel to pick according to the pixel's position and normal. 
+ vec3 albedo = texture2D(textureSampler, vUV).rgb; + vec3 normal = (texture2D(normalSampler, vUV)).xyz; + vec3 position = (view * texture2D(positionSampler, vUV)).xyz; + + vec3 reflected = normalize(reflect(normalize(position), normalize(normal))); + + // vec3 jitt = mix(vec3(0.0), hash(position), spec); + // ReflectionInfo info = getReflectionInfo(jitt + reflected, position); + ReflectionInfo info = getReflectionInfo(reflected, position); + + vec2 dCoords = smoothstep(0.2, 0.6, abs(vec2(0.5, 0.5) - info.coords.xy)); + float screenEdgefactor = clamp(1.0 - (dCoords.x + dCoords.y), 0.0, 1.0); + + // Fresnel + vec3 F0 = vec3(0.04); + F0 = mix(F0, albedo, spec); + vec3 fresnel = fresnelSchlick(max(dot(normalize(normal), normalize(position)), 0.0), F0); + + // Apply + float reflectionMultiplier = clamp(pow(spec * strength, reflectionSpecularFalloffExponent) * screenEdgefactor * reflected.z, 0.0, 0.9); + vec3 SSR = info.color * fresnel; + + gl_FragColor = vec4(SSR, reflectionMultiplier); + #else + gl_FragColor = texture2D(textureSampler, vUV); + #endif +} diff --git a/src/Shaders/standard.fragment.fx b/src/Shaders/standard.fragment.fx index 8dc9ecb5a68c..13d07f13503e 100644 --- a/src/Shaders/standard.fragment.fx +++ b/src/Shaders/standard.fragment.fx @@ -88,6 +88,21 @@ void main(void) } #endif +#if defined(SSR_MERGE) +uniform sampler2D otherSampler; +uniform sampler2D roughnessSampler; + +void main(void) +{ + vec4 ssr = texture2D(textureSampler, vUV); + vec4 color = texture2D(otherSampler, vUV); + + float di = 1.0 - ssr.a; + + gl_FragColor = vec4((color.rgb * di) + (ssr.rgb * ssr.a), 1.0); +} +#endif + #if defined(VLS) #define PI 3.1415926535897932384626433832795