diff --git a/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts b/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts index ae496df275d..155ea3275e2 100644 --- a/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts +++ b/src/PostProcesses/RenderPipeline/Pipelines/standardRenderingPipeline.ts @@ -20,6 +20,7 @@ import { Scene } from "../../../scene"; import { Constants } from "../../../Engines/constants"; import { _TypeStore } from '../../../Misc/typeStore'; import { MotionBlurPostProcess } from "../../motionBlurPostProcess"; +import { ScreenSpaceReflectionPostProcess } from "../../screenSpaceReflectionPostProcess"; declare type Animation = import("../../../Animations/animation").Animation; @@ -77,10 +78,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Post-process used to merge the volumetric light effect and the real scene color */ public volumetricLightMergePostProces: Nullable = null; - /** - * Post-process used to store the final volumetric light post-process (attach/detach for debug purpose) - */ - public volumetricLightFinalPostProcess: Nullable = null; /** * Base post-process used to calculate the average luminance of the final image for HDR @@ -100,14 +97,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Post-process used to store the final texture adder post-process (attach/detach for debug purpose) */ public textureAdderFinalPostProcess: Nullable = null; - /** - * Post-process used to store the final lens flare post-process (attach/detach for debug purpose) - */ - public lensFlareFinalPostProcess: Nullable = null; - /** - * Post-process used to merge the final HDR post-process and the real scene color - */ - public hdrFinalPostProcess: Nullable = null; /** * Post-process used to create a lens flare effect */ @@ -129,6 +118,9 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme */ public 
fxaaPostProcess: Nullable = null; + public screenSpaceReflectionPostProcess: Nullable = null; + public screenSpaceReflectionMergePostProcess: Nullable = null; + // Values /** @@ -319,6 +311,12 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } } + /** + * Gets how much the reflections are blurred. + */ + @serialize() + public screenSpaceReflectionBlurWidth: number = 8; + /** * List of animations for the pipeline (IAnimatable implementation) */ @@ -328,7 +326,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme * Private members */ private _scene: Scene; - private _currentDepthOfFieldSource: Nullable = null; + private _currentFinalColorSource: Nullable = null; private _basePostProcess: Nullable; private _fixedExposure: number = 1.0; @@ -353,6 +351,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme private _hdrEnabled: boolean = false; private _motionBlurEnabled: boolean = false; private _fxaaEnabled: boolean = false; + private _screenSpaceReflectionEnabled: boolean = false; private _motionBlurSamples: number = 64.0; private _volumetricLightStepsCount: number = 50.0; @@ -491,6 +490,23 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this._buildPipeline(); } + /** + * Specifies if screen space reflections is enabled. 
+ */ + @serialize() + public get screenSpaceReflectionEnabled(): boolean { + return this._screenSpaceReflectionEnabled; + } + + public set screenSpaceReflectionEnabled(enabled: boolean) { + if (this._screenSpaceReflectionEnabled === enabled) { + return; + } + + this._screenSpaceReflectionEnabled = enabled; + this._buildPipeline(); + } + /** * Specifies the number of steps used to calculate the volumetric lights * Typically in interval [50, 200] @@ -590,18 +606,22 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (!this._basePostProcess) { this.originalPostProcess = new PostProcess("HDRPass", "standard", [], [], ratio, null, Constants.TEXTURE_BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", this._floatTextureType); this.originalPostProcess.onApply = () => { - this._currentDepthOfFieldSource = this.originalPostProcess; + this._currentFinalColorSource = this.originalPostProcess; }; } else { this.originalPostProcess = this._basePostProcess; } - if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled) { + if (this._bloomEnabled || this._vlsEnabled || this._lensFlareEnabled || this._depthOfFieldEnabled || this._motionBlurEnabled || this._screenSpaceReflectionEnabled) { this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPassPostProcess", () => { return this.originalPostProcess; }, true)); } - this._currentDepthOfFieldSource = this.originalPostProcess; + this._currentFinalColorSource = this.originalPostProcess; + + if (this._screenSpaceReflectionEnabled) { + this._createScreenSpaceReflectionPostProcess(scene, ratio); + } if (this._bloomEnabled) { // Create down sample X4 post-process @@ -611,7 +631,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this._createBrightPassPostProcess(scene, ratio / 2); // Create gaussian blur post-processes (down sampling blurs) - 
this._createBlurPostProcesses(scene, ratio / 4, 1); + this._createBlurPostProcesses(scene, ratio / 4); // Create texture adder post-process this._createTextureAdderPostProcess(scene, ratio); @@ -624,19 +644,11 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this._vlsEnabled) { // Create volumetric light this._createVolumetricLightPostProcess(scene, ratio); - - // Create volumetric light final post-process - this.volumetricLightFinalPostProcess = new PostProcess("HDRVLSFinal", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSFinal", () => { return this.volumetricLightFinalPostProcess; }, true)); } if (this._lensFlareEnabled) { // Create lens flare post-process this._createLensFlarePostProcess(scene, ratio); - - // Create depth-of-field source post-process post lens-flare and disable it now - this.lensFlareFinalPostProcess = new PostProcess("HDRPostLensFlareDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRPostLensFlareDepthOfFieldSource", () => { return this.lensFlareFinalPostProcess; }, true)); } if (this._hdrEnabled) { @@ -645,15 +657,11 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Create HDR this._createHdrPostProcess(scene, ratio); - - // Create depth-of-field source post-process post hdr and disable it now - this.hdrFinalPostProcess = new PostProcess("HDRPostHDReDepthOfFieldSource", "standard", [], [], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define PASS_POST_PROCESS", Constants.TEXTURETYPE_UNSIGNED_INT); - this.addEffect(new PostProcessRenderEffect(scene.getEngine(), 
"HDRPostHDReDepthOfFieldSource", () => { return this.hdrFinalPostProcess; }, true)); } if (this._depthOfFieldEnabled) { // Create gaussian blur used by depth-of-field - this._createBlurPostProcesses(scene, ratio / 2, 3, "depthOfFieldBlurWidth"); + this._createBlurPostProcesses(scene, ratio / 2, "depthOfFieldBlurWidth"); // Create depth-of-field post-process this._createDepthOfFieldPostProcess(scene, ratio); @@ -731,7 +739,8 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } // Create blur H&V post-processes - private _createBlurPostProcesses(scene: Scene, ratio: number, indice: number, blurWidthKey: string = "blurWidth"): void { + private _createBlurPostProcesses(scene: Scene, ratio: number, blurWidthKey: string = "blurWidth"): void { + var indice = this.blurHPostProcesses.length; var engine = scene.getEngine(); var blurX = new BlurPostProcess("HDRBlurH" + "_" + indice, new Vector2(1, 0), (this)[blurWidthKey], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, Constants.TEXTURETYPE_UNSIGNED_INT); @@ -758,18 +767,40 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme private _createTextureAdderPostProcess(scene: Scene, ratio: number): void { this.textureAdderPostProcess = new PostProcess("HDRTextureAdder", "standard", ["exposure"], ["otherSampler", "lensSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define TEXTURE_ADDER", Constants.TEXTURETYPE_UNSIGNED_INT); this.textureAdderPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("otherSampler", this._vlsEnabled ? 
this._currentDepthOfFieldSource : this.originalPostProcess); + // effect.setTextureFromPostProcess("otherSampler", this._currentFinalColorSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("lensSampler", this.lensTexture); effect.setFloat("exposure", this._currentExposure); - this._currentDepthOfFieldSource = this.textureAdderFinalPostProcess; + this._currentFinalColorSource = this.textureAdderFinalPostProcess; }; // Add to pipeline this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRTextureAdder", () => { return this.textureAdderPostProcess; }, true)); } + // Creates the screen space reflection post-process + private _createScreenSpaceReflectionPostProcess(scene: Scene, ratio: number): void { + // Base post-process + this.screenSpaceReflectionPostProcess = new ScreenSpaceReflectionPostProcess("HDRScreenSpaceReflection", scene, ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, this._floatTextureType); + this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRScreenSpaceReflection", () => { return this.screenSpaceReflectionPostProcess; }, true)); + + // Smooth + this._createBlurPostProcesses(scene, ratio / 4, "screenSpaceReflectionBlurWidth"); + + // Merge + this.screenSpaceReflectionMergePostProcess = new PostProcess( + "HDRScreenSpaceReflectionMerge", "standard", [], ["otherSampler"], ratio, null, + Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define MERGE_POST_PROCESS" + ); + this.screenSpaceReflectionMergePostProcess.onApply = (effect: Effect) => { + effect.setTextureFromPostProcessOutput("otherSampler", this.originalPostProcess); + this._currentFinalColorSource = this.screenSpaceReflectionMergePostProcess; + }; + this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRScreenSpaceReflectionMerge", () => { return this.screenSpaceReflectionMergePostProcess; }, true)); + } + private _createVolumetricLightPostProcess(scene: Scene, 
ratio: number): void { var geometryRenderer = scene.enableGeometryBufferRenderer(); geometryRenderer.enablePosition = true; @@ -813,15 +844,15 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLS", () => { return this.volumetricLightPostProcess; }, true)); // Smooth - this._createBlurPostProcesses(scene, ratio / 4, 0, "volumetricLightBlurScale"); + this._createBlurPostProcesses(scene, ratio / 4, "volumetricLightBlurScale"); // Merge this.volumetricLightMergePostProces = new PostProcess("HDRVLSMerge", "standard", [], ["originalSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define VLSMERGE"); this.volumetricLightMergePostProces.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("originalSampler", this._bloomEnabled ? this.textureAdderFinalPostProcess : this.originalPostProcess); + effect.setTextureFromPostProcessOutput("originalSampler", this._currentFinalColorSource); - this._currentDepthOfFieldSource = this.volumetricLightFinalPostProcess; + this._currentFinalColorSource = this.volumetricLightMergePostProces; }; this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRVLSMerge", () => { return this.volumetricLightMergePostProces; }, true)); @@ -921,7 +952,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme var lastTime = 0; this.hdrPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("textureAdderSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("textureAdderSampler", this._currentFinalColorSource); time += scene.getEngine().getDeltaTime(); @@ -950,7 +981,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme lastTime = time; - this._currentDepthOfFieldSource = this.hdrFinalPostProcess; + this._currentFinalColorSource = this.hdrPostProcess; }; this.addEffect(new 
PostProcessRenderEffect(scene.getEngine(), "HDR", () => { return this.hdrPostProcess; }, true)); @@ -961,7 +992,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.lensFlarePostProcess = new PostProcess("HDRLensFlare", "standard", ["strength", "ghostDispersal", "haloWidth", "resolution", "distortionStrength"], ["lensColorSampler"], ratio / 2, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE", Constants.TEXTURETYPE_UNSIGNED_INT); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlare", () => { return this.lensFlarePostProcess; }, true)); - this._createBlurPostProcesses(scene, ratio / 4, 2, "lensFlareBlurWidth"); + this._createBlurPostProcesses(scene, ratio / 4, "lensFlareBlurWidth"); this.lensFlareComposePostProcess = new PostProcess("HDRLensFlareCompose", "standard", ["lensStarMatrix"], ["otherSampler", "lensDirtSampler", "lensStarSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define LENS_FLARE_COMPOSE", Constants.TEXTURETYPE_UNSIGNED_INT); this.addEffect(new PostProcessRenderEffect(scene.getEngine(), "HDRLensFlareCompose", () => { return this.lensFlareComposePostProcess; }, true)); @@ -970,7 +1001,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme // Lens flare this.lensFlarePostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("textureSampler", this._bloomEnabled ? this.blurHPostProcesses[0] : this.originalPostProcess); + effect.setTextureFromPostProcessOutput("textureSampler", this._bloomEnabled ? 
this.blurHPostProcesses[0] : this._currentFinalColorSource); effect.setTexture("lensColorSampler", this.lensColorTexture); effect.setFloat("strength", this.lensFlareStrength); effect.setFloat("ghostDispersal", this.lensFlareGhostDispersal); @@ -1004,7 +1035,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme return; } - effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("lensDirtSampler", this.lensFlareDirtTexture); effect.setTexture("lensStarSampler", this.lensStarTexture); @@ -1025,7 +1056,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme effect.setMatrix("lensStarMatrix", lensStarMatrix); - this._currentDepthOfFieldSource = this.lensFlareFinalPostProcess; + this._currentFinalColorSource = this.lensFlareComposePostProcess; }; } @@ -1033,7 +1064,7 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme private _createDepthOfFieldPostProcess(scene: Scene, ratio: number): void { this.depthOfFieldPostProcess = new PostProcess("HDRDepthOfField", "standard", ["distance"], ["otherSampler", "depthSampler"], ratio, null, Texture.BILINEAR_SAMPLINGMODE, scene.getEngine(), false, "#define DEPTH_OF_FIELD", Constants.TEXTURETYPE_UNSIGNED_INT); this.depthOfFieldPostProcess.onApply = (effect: Effect) => { - effect.setTextureFromPostProcess("otherSampler", this._currentDepthOfFieldSource); + effect.setTextureFromPostProcessOutput("otherSampler", this._currentFinalColorSource); effect.setTexture("depthSampler", this._getDepthTexture()); effect.setFloat("distance", this.depthOfFieldDistance); @@ -1101,6 +1132,9 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.originalPostProcess) { this.originalPostProcess.dispose(camera); } + if (this.screenSpaceReflectionPostProcess) { 
this.screenSpaceReflectionPostProcess.dispose(camera); } + if (this.screenSpaceReflectionMergePostProcess) { this.screenSpaceReflectionMergePostProcess.dispose(camera); } + if (this.downSampleX4PostProcess) { this.downSampleX4PostProcess.dispose(camera); } if (this.brightPassPostProcess) { this.brightPassPostProcess.dispose(camera); } if (this.textureAdderPostProcess) { this.textureAdderPostProcess.dispose(camera); } @@ -1110,7 +1144,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.volumetricLightSmoothXPostProcess) { this.volumetricLightSmoothXPostProcess.dispose(camera); } if (this.volumetricLightSmoothYPostProcess) { this.volumetricLightSmoothYPostProcess.dispose(camera); } if (this.volumetricLightMergePostProces) { this.volumetricLightMergePostProces.dispose(camera); } - if (this.volumetricLightFinalPostProcess) { this.volumetricLightFinalPostProcess.dispose(camera); } if (this.lensFlarePostProcess) { this.lensFlarePostProcess.dispose(camera); } if (this.lensFlareComposePostProcess) { this.lensFlareComposePostProcess.dispose(camera); } @@ -1121,7 +1154,6 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme if (this.luminancePostProcess) { this.luminancePostProcess.dispose(camera); } if (this.hdrPostProcess) { this.hdrPostProcess.dispose(camera); } - if (this.hdrFinalPostProcess) { this.hdrFinalPostProcess.dispose(camera); } if (this.depthOfFieldPostProcess) { this.depthOfFieldPostProcess.dispose(camera); } @@ -1147,15 +1179,15 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme this.volumetricLightSmoothXPostProcess = null; this.volumetricLightSmoothYPostProcess = null; this.volumetricLightMergePostProces = null; - this.volumetricLightFinalPostProcess = null; this.lensFlarePostProcess = null; this.lensFlareComposePostProcess = null; this.luminancePostProcess = null; this.hdrPostProcess = null; - this.hdrFinalPostProcess = null; 
this.depthOfFieldPostProcess = null; this.motionBlurPostProcess = null; this.fxaaPostProcess = null; + this.screenSpaceReflectionPostProcess = null; + this.screenSpaceReflectionMergePostProcess = null; this.luminanceDownSamplePostProcesses = []; this.blurHPostProcesses = []; @@ -1174,8 +1206,8 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme } /** - * Serialize the rendering pipeline (Used when exporting) - * @returns the serialized object + * Serialize the rendering pipeline (Used when exporting). + * @returns the serialized object. */ public serialize(): any { var serializationObject = SerializationHelper.Serialize(this); @@ -1184,6 +1216,10 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme serializationObject.sourceLightId = this.sourceLight.id; } + if (this.screenSpaceReflectionPostProcess) { + serializationObject.screenSpaceReflectionPostProcess = SerializationHelper.Serialize(this.screenSpaceReflectionPostProcess); + } + serializationObject.customType = "StandardRenderingPipeline"; return serializationObject; @@ -1203,6 +1239,10 @@ export class StandardRenderingPipeline extends PostProcessRenderPipeline impleme p.sourceLight = scene.getLightByID(source.sourceLightId); } + if (source.screenSpaceReflectionPostProcess) { + SerializationHelper.Parse(() => p.screenSpaceReflectionPostProcess, source.screenSpaceReflectionPostProcess, scene, rootUrl); + } + return p; } diff --git a/src/PostProcesses/index.ts b/src/PostProcesses/index.ts index 76814de4ec4..b6ea35c11ed 100644 --- a/src/PostProcesses/index.ts +++ b/src/PostProcesses/index.ts @@ -28,4 +28,5 @@ export * from "./stereoscopicInterlacePostProcess"; export * from "./tonemapPostProcess"; export * from "./volumetricLightScatteringPostProcess"; export * from "./vrDistortionCorrectionPostProcess"; -export * from "./vrMultiviewToSingleviewPostProcess"; \ No newline at end of file +export * from "./vrMultiviewToSingleviewPostProcess"; +export * 
from "./screenSpaceReflectionPostProcess"; \ No newline at end of file diff --git a/src/PostProcesses/screenSpaceReflectionPostProcess.ts b/src/PostProcesses/screenSpaceReflectionPostProcess.ts new file mode 100644 index 00000000000..a2ca2b8dd1b --- /dev/null +++ b/src/PostProcesses/screenSpaceReflectionPostProcess.ts @@ -0,0 +1,160 @@ +import { Nullable } from "../types"; +import { Camera } from "../Cameras/camera"; +import { Effect } from "../Materials/effect"; +import { PostProcess, PostProcessOptions } from "./postProcess"; +import { Constants } from "../Engines/constants"; +import { Scene } from '../scene'; +import { GeometryBufferRenderer } from '../Rendering/geometryBufferRenderer'; +import { serialize } from '../Misc/decorators'; + +import "../Shaders/screenSpaceReflection.fragment"; + + +declare type Engine = import("../Engines/engine").Engine; +/** + * The ScreenSpaceReflectionPostProcess performs realtime reflections using only the information available on screen (positions, normals and roughness). + * Based on http://imanolfotia.com/blog/update/2017/03/11/ScreenSpaceReflections.html + */ +export class ScreenSpaceReflectionPostProcess extends PostProcess { + /** + * Gets or sets a reflection threshold mainly used to adjust the reflection's height. + */ + @serialize() + public threshold: number = 0; + /** + * Gets or sets the current reflection strength. 1.0 is an ideal value but can be increased/decreased for particular results. + */ + @serialize() + public strength: number = 1; + /** + * Gets or sets the falloff exponent used while computing fresnel. The higher the exponent, the more discrete the reflections will be. + */ + @serialize() + public reflectionSpecularFalloffExponent: number = 3; + + private _geometryBufferRenderer: Nullable; + private _enableSmoothReflections: boolean = true; + private _reflectionSamples: number = 64; + + /** + * Creates a new instance of ScreenSpaceReflectionPostProcess + * @param name The name of the effect. + * @param scene The scene containing the objects to calculate reflections. 
+ * @param options The required width/height ratio to downsize to before computing the render pass. + * @param camera The camera to apply the render pass to. + * @param samplingMode The sampling mode to be used when computing the pass. (default: 0) + * @param engine The engine which the post process will be applied. (default: current engine) + * @param reusable If the post process can be reused on the same frame. (default: false) + * @param textureType Type of textures used when performing the post process. (default: 0) + * @param blockCompilation If compilation of the shader should not be done in the constructor. The updateEffect method can be used to compile the shader at a later time. (default: false) + */ + constructor(name: string, scene: Scene, options: number | PostProcessOptions, camera: Nullable, samplingMode?: number, engine?: Engine, reusable?: boolean, textureType: number = Constants.TEXTURETYPE_UNSIGNED_INT, blockCompilation = false) { + super(name, "screenSpaceReflection", [ + "projection", "view", "threshold", "reflectionSpecularFalloffExponent", "strength" + ], [ + "textureSampler", "normalSampler", "positionSampler", "roughnessSampler" + ], options, camera, samplingMode, engine, reusable, + "#define SSR_SUPPORTED\n#define REFLECTION_SAMPLES 64\n", + textureType, undefined, null, blockCompilation); + + // Get geometry buffer renderer and update effect + const geometryBufferRenderer = scene.enableGeometryBufferRenderer(); + if (geometryBufferRenderer) { + if (geometryBufferRenderer.isSupported) { + geometryBufferRenderer.enablePosition = true; + geometryBufferRenderer.enableRoughness = true; + this._geometryBufferRenderer = geometryBufferRenderer; + } + } + + this._updateEffectDefines(); + + // On apply, send uniforms + this.onApply = (effect: Effect) => { + if (!geometryBufferRenderer) { + return; + } + + // Samplers + const positionIndex = geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.POSITION_TEXTURE_TYPE); + const roughnessIndex = 
geometryBufferRenderer.getTextureIndex(GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE); + + effect.setTexture("normalSampler", geometryBufferRenderer.getGBuffer().textures[1]); + effect.setTexture("positionSampler", geometryBufferRenderer.getGBuffer().textures[positionIndex]); + effect.setTexture("roughnessSampler", geometryBufferRenderer.getGBuffer().textures[roughnessIndex]); + + // Uniforms + const camera = scene.activeCamera; + if (!camera) { + return; + } + + const viewMatrix = camera.getViewMatrix(); + const projectionMatrix = camera.getProjectionMatrix(); + + effect.setMatrix("projection", projectionMatrix); + effect.setMatrix("view", viewMatrix); + effect.setFloat("threshold", this.threshold); + effect.setFloat("reflectionSpecularFalloffExponent", this.reflectionSpecularFalloffExponent); + effect.setFloat("strength", this.strength); + }; + } + + /** + * Gets whether or not smoothing reflections is enabled. + * Enabling smoothing will require more GPU power and can generate a drop in FPS. + */ + @serialize() + public get enableSmoothReflections(): boolean { + return this._enableSmoothReflections; + } + + /** + * Sets whether or not smoothing reflections is enabled. + * Enabling smoothing will require more GPU power and can generate a drop in FPS. + */ + public set enableSmoothReflections(enabled: boolean) { + if (enabled === this._enableSmoothReflections) { + return; + } + + this._enableSmoothReflections = enabled; + this._updateEffectDefines(); + } + + /** + * Gets the number of samples taken while computing reflections. The higher the samples count is, + * the more GPU power the post-process will require, which can generate a drop in FPS. + */ + @serialize() + public get reflectionSamples(): number { + return this._reflectionSamples; + } + + /** + * Sets the number of samples taken while computing reflections. The higher the samples count is, + * the more GPU power the post-process will require, which can generate a drop in FPS. 
+ */ + public set reflectionSamples(samples: number) { + if (samples === this._reflectionSamples) { + return; + } + + this._reflectionSamples = samples; + this._updateEffectDefines(); + } + + private _updateEffectDefines(): void { + const defines: string[] = []; + if (this._geometryBufferRenderer) { + defines.push("#define SSR_SUPPORTED"); + } + if (this._enableSmoothReflections) { + defines.push("#define ENABLE_SMOOTH_REFLECTIONS"); + } + + defines.push("#define REFLECTION_SAMPLES " + (this._reflectionSamples >> 0)); + + this.updateEffect(defines.join("\n")); + } +} diff --git a/src/Rendering/geometryBufferRenderer.ts b/src/Rendering/geometryBufferRenderer.ts index 486bfe554b4..4fdf1b4f64d 100644 --- a/src/Rendering/geometryBufferRenderer.ts +++ b/src/Rendering/geometryBufferRenderer.ts @@ -11,11 +11,13 @@ import { Material } from "../Materials/material"; import { MaterialHelper } from "../Materials/materialHelper"; import { Scene } from "../scene"; import { AbstractMesh } from "../Meshes/abstractMesh"; +import { Color4 } from '../Maths/math.color'; +import { StandardMaterial } from '../Materials/standardMaterial'; +import { PBRMaterial } from '../Materials/PBR/pbrMaterial'; import "../Shaders/geometry.fragment"; import "../Shaders/geometry.vertex"; import { _DevTools } from '../Misc/devTools'; -import { Color4 } from '../Maths/math.color'; /** @hidden */ interface ISavedTransformationMatrix { @@ -37,6 +39,11 @@ export class GeometryBufferRenderer { * using getIndex(GeometryBufferRenderer.VELOCITY_TEXTURE_INDEX) */ public static readonly VELOCITY_TEXTURE_TYPE = 2; + /** + * Constant used to retrieve the roughness texture index in the G-Buffer textures array + * using the getIndex(GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE) + */ + public static readonly ROUGHNESS_TEXTURE_TYPE = 3; /** * Dictionary used to store the previous transformation matrices of each rendered mesh @@ -61,9 +68,11 @@ export class GeometryBufferRenderer { private _ratio: number; private 
_enablePosition: boolean = false; private _enableVelocity: boolean = false; + private _enableRoughness: boolean = false; private _positionIndex: number = -1; private _velocityIndex: number = -1; + private _roughnessIndex: number = -1; protected _effect: Effect; protected _cachedDefines: string; @@ -92,6 +101,7 @@ export class GeometryBufferRenderer { switch (textureType) { case GeometryBufferRenderer.POSITION_TEXTURE_TYPE: return this._positionIndex; case GeometryBufferRenderer.VELOCITY_TEXTURE_TYPE: return this._velocityIndex; + case GeometryBufferRenderer.ROUGHNESS_TEXTURE_TYPE: return this._roughnessIndex; default: return -1; } } @@ -133,6 +143,22 @@ export class GeometryBufferRenderer { this._createRenderTargets(); } + /** + * Gets a boolean indicating if objects' roughness is enabled in the G buffer. + */ + public get enableRoughness(): boolean { + return this._enableRoughness; + } + + /** + * Sets whether or not objects' roughness is enabled for the G buffer. + */ + public set enableRoughness(enable: boolean) { + this._enableRoughness = enable; + this.dispose(); + this._createRenderTargets(); + } + + /** + * Gets the scene associated with the buffer. 
*/ @@ -175,28 +201,54 @@ export class GeometryBufferRenderer { * @returns true if ready otherwise false */ public isReady(subMesh: SubMesh, useInstances: boolean): boolean { - var material: any = subMesh.getMaterial(); + var material = subMesh.getMaterial(); if (material && material.disableDepthWrite) { return false; } var defines = []; - var attribs = [VertexBuffer.PositionKind, VertexBuffer.NormalKind]; - var mesh = subMesh.getMesh(); // Alpha test - if (material && material.needAlphaTesting()) { - defines.push("#define ALPHATEST"); - if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) { - attribs.push(VertexBuffer.UVKind); - defines.push("#define UV1"); + if (material) { + let needUv = false; + if (material.needAlphaTesting()) { + defines.push("#define ALPHATEST"); + needUv = true; } - if (mesh.isVerticesDataPresent(VertexBuffer.UV2Kind)) { - attribs.push(VertexBuffer.UV2Kind); - defines.push("#define UV2"); + + if (this._enableRoughness) { + if (material instanceof StandardMaterial && material.specularTexture) { + defines.push("#define HAS_SPECULAR"); + needUv = true; + } else if (material instanceof PBRMaterial && material.metallicTexture) { + defines.push("#define HAS_METALLIC"); + if (material.useRoughnessFromMetallicTextureAlpha) { + defines.push("#define ROUGHNESSSTOREINMETALMAPALPHA"); + } else if (material.useRoughnessFromMetallicTextureGreen) { + defines.push("#define ROUGHNESSSTOREINMETALMAPGREEN"); + } + + if (material.useMetallnessFromMetallicTextureBlue) { + defines.push("#define METALLNESSSTOREINMETALMAPBLUE"); + } + + needUv = true; + } + } + + if (needUv) { + defines.push("#define NEED_UV"); + if (mesh.isVerticesDataPresent(VertexBuffer.UVKind)) { + attribs.push(VertexBuffer.UVKind); + defines.push("#define UV1"); + } + if (mesh.isVerticesDataPresent(VertexBuffer.UV2Kind)) { + attribs.push(VertexBuffer.UV2Kind); + defines.push("#define UV2"); + } } } @@ -214,6 +266,11 @@ export class GeometryBufferRenderer { } } + if (this._enableRoughness) { + 
defines.push("#define ROUGHNESS"); + defines.push("#define ROUGHNESS_INDEX " + this._roughnessIndex); + } + // Bones if (mesh.useBones && mesh.computeBonesUsingShaders) { attribs.push(VertexBuffer.MatricesIndicesKind); @@ -258,9 +315,9 @@ export class GeometryBufferRenderer { this._effect = this._scene.getEngine().createEffect("geometry", attribs, ["world", "mBones", "viewProjection", "diffuseMatrix", "view", "previousWorld", "previousViewProjection", "mPreviousBones", "morphTargetInfluences"], - ["diffuseSampler"], join, + ["diffuseSampler", "roughnessSampler"], join, undefined, undefined, undefined, - { buffersCount: this._enablePosition ? 3 : 2, maxSimultaneousMorphTargets: numMorphInfluencers }); + { buffersCount: this._multiRenderTarget.textures.length - 1, maxSimultaneousMorphTargets: numMorphInfluencers }); } return this._effect.isReady(); @@ -309,6 +366,11 @@ export class GeometryBufferRenderer { count++; } + if (this._enableRoughness) { + this._roughnessIndex = count; + count++; + } + this._multiRenderTarget = new MultiRenderTarget("gBuffer", { width: engine.getRenderWidth() * this._ratio, height: engine.getRenderHeight() * this._ratio }, count, this._scene, { generateMipMaps: false, generateDepthTexture: true, defaultType: Constants.TEXTURETYPE_FLOAT }); @@ -371,13 +433,23 @@ export class GeometryBufferRenderer { this._effect.setMatrix("viewProjection", scene.getTransformMatrix()); this._effect.setMatrix("view", scene.getViewMatrix()); - // Alpha test - if (material && material.needAlphaTesting()) { - var alphaTexture = material.getAlphaTestTexture(); + if (material) { + // Alpha test + if (material.needAlphaTesting()) { + var alphaTexture = material.getAlphaTestTexture(); + if (alphaTexture) { + this._effect.setTexture("diffuseSampler", alphaTexture); + this._effect.setMatrix("diffuseMatrix", alphaTexture.getTextureMatrix()); + } + } - if (alphaTexture) { - this._effect.setTexture("diffuseSampler", alphaTexture); - this._effect.setMatrix("diffuseMatrix", 
alphaTexture.getTextureMatrix()); + // Roughness + if (this._enableRoughness) { + if (material instanceof StandardMaterial && material.specularTexture) { + this._effect.setTexture("roughnessSampler", material.specularTexture); + } else if (material instanceof PBRMaterial && material.metallicTexture) { + this._effect.setTexture("roughnessSampler", material.metallicTexture); + } } } diff --git a/src/Shaders/geometry.fragment.fx b/src/Shaders/geometry.fragment.fx index 77b584d303d..ed1d23fbb6e 100644 --- a/src/Shaders/geometry.fragment.fx +++ b/src/Shaders/geometry.fragment.fx @@ -15,11 +15,18 @@ varying vec4 vCurrentPosition; varying vec4 vPreviousPosition; #endif +#ifdef ROUGHNESS +uniform sampler2D roughnessSampler; +#endif + #ifdef ALPHATEST -varying vec2 vUV; uniform sampler2D diffuseSampler; #endif +#if defined(ALPHATEST) || defined(ROUGHNESS) +varying vec2 vUV; +#endif + #include[RENDER_TARGET_COUNT] void main() { @@ -28,7 +35,7 @@ void main() { discard; #endif - gl_FragData[0] = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, 1.0); + gl_FragData[0] = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, vViewPos.w); //color0 = vec4(vViewPos.z / vViewPos.w, 0.0, 0.0, 1.0); gl_FragData[1] = vec4(normalize(vNormalV), 1.0); //color2 = vec4(vPositionV, 1.0); @@ -46,4 +53,33 @@ void main() { gl_FragData[VELOCITY_INDEX] = vec4(velocity, 0.0, 1.0); #endif + + #ifdef ROUGHNESS + #ifdef HAS_SPECULAR + // Specular + vec4 roughness = vec4(texture2D(roughnessSampler, vUV).rgb, 1.0); + #elif HAS_METALLIC + // Metallic + vec4 roughness = vec4(1.0); + vec4 surfaceMetallicColorMap = texture2D(roughnessSampler, vUV); + + #ifdef METALLNESSSTOREINMETALMAPBLUE + roughness.r *= surfaceMetallicColorMap.b; + #else + roughness.r *= surfaceMetallicColorMap.r; + #endif + + #ifdef ROUGHNESSSTOREINMETALMAPALPHA + roughness.g *= surfaceMetallicColorMap.a; + #else + #ifdef ROUGHNESSSTOREINMETALMAPGREEN + roughness.g *= surfaceMetallicColorMap.g; + #endif + #endif + #else + vec4 roughness = vec4(0.0, 0.0, 0.0, 
1.0); + #endif + + gl_FragData[ROUGHNESS_INDEX] = roughness; + #endif } \ No newline at end of file diff --git a/src/Shaders/geometry.vertex.fx b/src/Shaders/geometry.vertex.fx index 954967706e9..5696f304904 100644 --- a/src/Shaders/geometry.vertex.fx +++ b/src/Shaders/geometry.vertex.fx @@ -11,9 +11,11 @@ precision highp int; attribute vec3 position; attribute vec3 normal; -#if defined(ALPHATEST) || defined(NEED_UV) +#ifdef NEED_UV varying vec2 vUV; +#ifdef ALPHATEST uniform mat4 diffuseMatrix; +#endif #ifdef UV1 attribute vec2 uv; #endif @@ -110,12 +112,20 @@ void main(void) gl_Position = viewProjection * finalWorld * vec4(positionUpdated, 1.0); -#if defined(ALPHATEST) || defined(BASIC_RENDER) -#ifdef UV1 - vUV = vec2(diffuseMatrix * vec4(uvUpdated, 1.0, 0.0)); -#endif -#ifdef UV2 - vUV = vec2(diffuseMatrix * vec4(uv2, 1.0, 0.0)); -#endif -#endif + #ifdef NEED_UV + #ifdef UV1 + #ifdef ALPHATEST + vUV = vec2(diffuseMatrix * vec4(uvUpdated, 1.0, 0.0)); + #else + vUV = uv; + #endif + #endif + #ifdef UV2 + #ifdef ALPHATEST + vUV = vec2(diffuseMatrix * vec4(uv2, 1.0, 0.0)); + #else + vUV = uv2; + #endif + #endif + #endif } diff --git a/src/Shaders/screenSpaceReflection.fragment.fx b/src/Shaders/screenSpaceReflection.fragment.fx new file mode 100644 index 00000000000..31f3d291adb --- /dev/null +++ b/src/Shaders/screenSpaceReflection.fragment.fx @@ -0,0 +1,150 @@ +// Screen Space Reflection Post-Process based on the tutorial +// http://imanolfotia.com/blog/update/2017/03/11/ScreenSpaceReflections.html + +uniform sampler2D textureSampler; +uniform sampler2D normalSampler; +uniform sampler2D positionSampler; +uniform sampler2D roughnessSampler; + +uniform mat4 view; +uniform mat4 projection; + +uniform float threshold; +uniform float strength; +uniform float reflectionSpecularFalloffExponent; + +// Varyings +varying vec2 vUV; + +// Constants +const float minRayStep = 0.001; +const int numBinarySearchSteps = 5; + +struct ReflectionInfo { + vec3 color; + vec4 coords; +}; + 
+/** + * According to specular, see https://en.wikipedia.org/wiki/Schlick%27s_approximation + */ +vec3 fresnelSchlick(float cosTheta, vec3 F0) +{ + return F0 + (1.0 - F0) * pow(1.0 - cosTheta, 5.0); +} + +/** + * Once the pixel's coordinates has been found, let's adjust (smooth) a little bit + * by sampling multiple reflection pixels. + */ +ReflectionInfo smoothReflectionInfo(vec3 dir, vec3 hitCoord) +{ + ReflectionInfo info; + info.color = vec3(0.0); + + vec4 projectedCoord; + float sampledDepth; + + for(int i = 0; i < numBinarySearchSteps; i++) + { + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + sampledDepth = (view * texture2D(textureSampler, projectedCoord.xy)).z; + + float depth = hitCoord.z - sampledDepth; + + dir *= 0.5; + if(depth < 0.0) + hitCoord -= dir; + else + hitCoord += dir; + + info.color += texture2D(textureSampler, projectedCoord.xy).rgb; + } + + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + // Merge colors + info.coords = vec4(projectedCoord.xy, sampledDepth, 1.0); + info.color += texture2D(textureSampler, projectedCoord.xy).rgb; + info.color /= float(numBinarySearchSteps + 1); + return info; +} + +/** + * Tests the given world position (hitCoord) according to the given reflection vector (dir) + * until it finds a collision (means that depth is enough close to say "it's the pixel to sample!"). 
+ */ +ReflectionInfo getReflectionInfo(vec3 dir, vec3 hitCoord) +{ + ReflectionInfo info; + vec4 projectedCoord; + float sampledDepth; + + for(int i = 0; i < REFLECTION_SAMPLES; i++) + { + hitCoord += dir; + + projectedCoord = projection * vec4(hitCoord, 1.0); + projectedCoord.xy /= projectedCoord.w; + projectedCoord.xy = 0.5 * projectedCoord.xy + vec2(0.5); + + sampledDepth = (view * texture2D(positionSampler, projectedCoord.xy)).z; + + float depth = hitCoord.z - sampledDepth; + + if(((dir.z - depth) < threshold) && depth > 0.0) + { + #ifdef ENABLE_SMOOTH_REFLECTIONS + return smoothReflectionInfo(dir, hitCoord); + #else + info.color = texture2D(textureSampler, projectedCoord.xy).rgb; + info.coords = vec4(projectedCoord.xy, sampledDepth, 0.0); + return info; + #endif + } + } + + info.color = texture2D(textureSampler, projectedCoord.xy).rgb; + info.coords = vec4(projectedCoord.xy, sampledDepth, 0.0); + return info; +} + +void main() +{ + #ifdef SSR_SUPPORTED + // Intensity + float spec = texture2D(roughnessSampler, vUV).r * strength; + if (spec < 0.1) + discard; + + // Get coordinates of the pixel to pick according to the pixel's position and normal. 
+ vec3 albedo = texture2D(textureSampler, vUV).rgb; + vec3 normal = (texture2D(normalSampler, vUV)).xyz; + vec3 position = (view * texture2D(positionSampler, vUV)).xyz; + + vec3 reflected = normalize(reflect(normalize(position), normalize(normal))); + + ReflectionInfo info = getReflectionInfo(reflected, position); + + vec2 dCoords = smoothstep(0.2, 0.6, abs(vec2(0.5, 0.5) - info.coords.xy)); + float screenEdgefactor = clamp(1.0 - (dCoords.x + dCoords.y), 0.0, 1.0); + + // Fresnel + vec3 F0 = vec3(0.04); + F0 = mix(F0, albedo, spec); + vec3 fresnel = fresnelSchlick(max(dot(normalize(normal), normalize(position)), 0.0), F0); + + // Apply + float reflectionMultiplier = pow(spec, reflectionSpecularFalloffExponent) * screenEdgefactor; // * reflected.z; + vec3 SSR = info.color * clamp(reflectionMultiplier, 0.0, 0.9) * fresnel; + + gl_FragColor = vec4(SSR * spec, 1.0); + #else + gl_FragColor = texture2D(textureSampler, vUV); + #endif +} diff --git a/src/Shaders/standard.fragment.fx b/src/Shaders/standard.fragment.fx index 8dc9ecb5a68..d5c3212bbc2 100644 --- a/src/Shaders/standard.fragment.fx +++ b/src/Shaders/standard.fragment.fx @@ -9,6 +9,17 @@ void main(void) } #endif +#if defined(MERGE_POST_PROCESS) +uniform sampler2D otherSampler; + +void main(void) +{ + vec4 color = texture2D(textureSampler, vUV); + vec4 other = texture2D(otherSampler, vUV); + gl_FragColor = vec4(color.rgb + other.rgb, color.a); +} +#endif + #if defined(DOWN_SAMPLE_X4) uniform vec2 dsOffsets[16];