Commit 6135c58

Version 5.2.1.
vanruesc committed Nov 14, 2018
1 parent 48f91b8 commit 6135c58
Showing 10 changed files with 6,530 additions and 5,675 deletions.
15 changes: 8 additions & 7 deletions build/postprocessing.js
@@ -1,5 +1,5 @@
/**
* postprocessing v5.2.0 build Wed Nov 14 2018
* postprocessing v5.2.1 build Wed Nov 14 2018
* https://github.com/vanruesc/postprocessing
* Copyright 2018 Raoul van Rüschen, Zlib
*/
@@ -3663,7 +3663,7 @@
return SMAAEffect;
}(Effect);

var fragment$s = "uniform sampler2D normalBuffer;\r\n\r\nuniform mat4 cameraProjectionMatrix;\r\nuniform mat4 cameraInverseProjectionMatrix;\r\n\r\nuniform vec2 radiusStep;\r\nuniform vec2 distanceCutoff;\r\nuniform vec2 proximityCutoff;\r\nuniform float seed;\r\nuniform float luminanceInfluence;\r\nuniform float scale;\r\nuniform float bias;\r\n\r\nfloat getViewZ(const in float depth) {\r\n\r\n\t#ifdef PERSPECTIVE_CAMERA\r\n\r\n\t\treturn perspectiveDepthToViewZ(depth, cameraNear, cameraFar);\r\n\r\n\t#else\r\n\r\n\t\treturn orthographicDepthToViewZ(depth, cameraNear, cameraFar);\r\n\r\n\t#endif\r\n\r\n}\r\n\r\nvec3 getViewPosition(const in vec2 screenPosition, const in float depth, const in float viewZ) {\r\n\r\n\tfloat clipW = cameraProjectionMatrix[2][3] * viewZ + cameraProjectionMatrix[3][3];\r\n\tvec4 clipPosition = vec4((vec3(screenPosition, depth) - 0.5) * 2.0, 1.0);\r\n\tclipPosition *= clipW; // Unproject.\r\n\r\n\treturn (cameraInverseProjectionMatrix * clipPosition).xyz;\r\n\r\n}\r\n\r\nfloat getOcclusion(const in vec3 p, const in vec3 n, const in vec3 sampleViewPosition) {\r\n\r\n\tvec3 viewDelta = sampleViewPosition - p;\r\n\tfloat d = length(viewDelta) * scale;\r\n\r\n\treturn max(0.0, dot(n, viewDelta) / d - bias) / (1.0 + pow2(d));\r\n\r\n}\r\n\r\nfloat getAmbientOcclusion(const in vec3 p, const in vec3 n, const in float depth, const in vec2 uv) {\r\n\r\n\tvec2 radius = radiusStep;\r\n\tfloat angle = rand(uv + seed) * PI2;\r\n\tfloat occlusionSum = 0.0;\r\n\tint sampleCount = 0;\r\n\r\n\t// Collect samples along a discrete spiral pattern.\r\n\tfor(int i = 0; i < SAMPLES; ++i) {\r\n\r\n\t\tvec2 coord = uv + vec2(cos(angle), sin(angle)) * radius;\r\n\t\tradius += radiusStep;\r\n\t\tangle += ANGLE_STEP;\r\n\r\n\t\tfloat sampleDepth = readDepth(coord);\r\n\r\n\t\tif(sampleDepth <= distanceCutoff.y) {\r\n\r\n\t\t\tfloat falloff = 1.0 - smoothstep(proximityCutoff.x, proximityCutoff.y, abs(depth - sampleDepth));\r\n\t\t\tvec3 sampleViewPosition = getViewPosition(coord, sampleDepth, getViewZ(sampleDepth));\r\n\t\t\tocclusionSum += getOcclusion(p, n, sampleViewPosition) * falloff;\r\n\r\n\t\t\t++sampleCount;\r\n\r\n\t\t}\r\n\r\n\t}\r\n\r\n\treturn (sampleCount == 0) ? 0.0 : occlusionSum / float(sampleCount);\r\n\r\n}\r\n\r\nvoid mainImage(const in vec4 inputColor, const in vec2 uv, const in float depth, out vec4 outputColor) {\r\n\r\n\tfloat ao = 1.0;\r\n\r\n\t// Skip fragments of objects that are too far away.\r\n\tif(depth <= distanceCutoff.y) {\r\n\r\n\t\tvec3 viewPosition = getViewPosition(uv, depth, getViewZ(depth));\r\n\t\tvec3 viewNormal = unpackRGBToNormal(texture2D(normalBuffer, uv).xyz);\r\n\t\tao -= getAmbientOcclusion(viewPosition, viewNormal, depth, uv);\r\n\r\n\t\t// Fade AO based on luminance and depth.\r\n\t\tfloat l = linearToRelativeLuminance(inputColor.rgb);\r\n\t\tao = mix(ao, 1.0, max(l * luminanceInfluence, smoothstep(distanceCutoff.x, distanceCutoff.y, depth)));\r\n\r\n\t}\r\n\r\n\toutputColor = vec4(vec3(ao), inputColor.a);\r\n\r\n}\r\n";
var fragment$s = "uniform sampler2D normalBuffer;\r\n\r\nuniform mat4 cameraProjectionMatrix;\r\nuniform mat4 cameraInverseProjectionMatrix;\r\n\r\nuniform vec2 radiusStep;\r\nuniform vec2 distanceCutoff;\r\nuniform vec2 proximityCutoff;\r\nuniform float seed;\r\nuniform float luminanceInfluence;\r\nuniform float scale;\r\nuniform float bias;\r\n\r\nfloat getViewZ(const in float depth) {\r\n\r\n\t#ifdef PERSPECTIVE_CAMERA\r\n\r\n\t\treturn perspectiveDepthToViewZ(depth, cameraNear, cameraFar);\r\n\r\n\t#else\r\n\r\n\t\treturn orthographicDepthToViewZ(depth, cameraNear, cameraFar);\r\n\r\n\t#endif\r\n\r\n}\r\n\r\nvec3 getViewPosition(const in vec2 screenPosition, const in float depth, const in float viewZ) {\r\n\r\n\tfloat clipW = cameraProjectionMatrix[2][3] * viewZ + cameraProjectionMatrix[3][3];\r\n\tvec4 clipPosition = vec4((vec3(screenPosition, depth) - 0.5) * 2.0, 1.0);\r\n\tclipPosition *= clipW; // Unproject.\r\n\r\n\treturn (cameraInverseProjectionMatrix * clipPosition).xyz;\r\n\r\n}\r\n\r\nfloat getOcclusion(const in vec3 p, const in vec3 n, const in vec3 sampleViewPosition) {\r\n\r\n\tvec3 viewDelta = sampleViewPosition - p;\r\n\tfloat d = length(viewDelta) * scale;\r\n\r\n\treturn max(0.0, dot(n, viewDelta) / d - bias) / (1.0 + pow2(d));\r\n\r\n}\r\n\r\nfloat getAmbientOcclusion(const in vec3 p, const in vec3 n, const in float depth, const in vec2 uv) {\r\n\r\n\tvec2 radius = radiusStep;\r\n\tfloat angle = rand(uv + seed) * PI2;\r\n\tfloat occlusionSum = 0.0;\r\n\r\n\t// Collect samples along a discrete spiral pattern.\r\n\tfor(int i = 0; i < SAMPLES_INT; ++i) {\r\n\r\n\t\tvec2 coord = uv + vec2(cos(angle), sin(angle)) * radius;\r\n\t\tradius += radiusStep;\r\n\t\tangle += ANGLE_STEP;\r\n\r\n\t\tfloat sampleDepth = readDepth(coord);\r\n\t\tfloat proximity = abs(depth - sampleDepth);\r\n\r\n\t\tif(sampleDepth < distanceCutoff.y && proximity < proximityCutoff.y) {\r\n\r\n\t\t\tfloat falloff = 1.0 - smoothstep(proximityCutoff.x, proximityCutoff.y, proximity);\r\n\t\t\tvec3 sampleViewPosition = getViewPosition(coord, sampleDepth, getViewZ(sampleDepth));\r\n\t\t\tocclusionSum += getOcclusion(p, n, sampleViewPosition) * falloff;\r\n\r\n\t\t}\r\n\r\n\t}\r\n\r\n\treturn occlusionSum / SAMPLES_FLOAT;\r\n\r\n}\r\n\r\nvoid mainImage(const in vec4 inputColor, const in vec2 uv, const in float depth, out vec4 outputColor) {\r\n\r\n\tfloat ao = 1.0;\r\n\r\n\t// Skip fragments of objects that are too far away.\r\n\tif(depth < distanceCutoff.y) {\r\n\r\n\t\tvec3 viewPosition = getViewPosition(uv, depth, getViewZ(depth));\r\n\t\tvec3 viewNormal = unpackRGBToNormal(texture2D(normalBuffer, uv).xyz);\r\n\t\tao -= getAmbientOcclusion(viewPosition, viewNormal, depth, uv);\r\n\r\n\t\t// Fade AO based on luminance and depth.\r\n\t\tfloat l = linearToRelativeLuminance(inputColor.rgb);\r\n\t\tao = mix(ao, 1.0, max(l * luminanceInfluence, smoothstep(distanceCutoff.x, distanceCutoff.y, depth)));\r\n\r\n\t}\r\n\r\n\toutputColor = vec4(vec3(ao), inputColor.a);\r\n\r\n}\r\n";
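For readability, this is the revised getAmbientOcclusion from the new shader string above, with the escape sequences expanded. The loop bound is now the SAMPLES_INT macro, samples outside the proximity cutoff are rejected before the falloff is evaluated, and the sum is normalized by the constant SAMPLES_FLOAT instead of a per-fragment sample counter:

float getAmbientOcclusion(const in vec3 p, const in vec3 n, const in float depth, const in vec2 uv) {

	vec2 radius = radiusStep;
	float angle = rand(uv + seed) * PI2;
	float occlusionSum = 0.0;

	// Collect samples along a discrete spiral pattern.
	for(int i = 0; i < SAMPLES_INT; ++i) {

		vec2 coord = uv + vec2(cos(angle), sin(angle)) * radius;
		radius += radiusStep;
		angle += ANGLE_STEP;

		float sampleDepth = readDepth(coord);
		float proximity = abs(depth - sampleDepth);

		if(sampleDepth < distanceCutoff.y && proximity < proximityCutoff.y) {

			float falloff = 1.0 - smoothstep(proximityCutoff.x, proximityCutoff.y, proximity);
			vec3 sampleViewPosition = getViewPosition(coord, sampleDepth, getViewZ(sampleDepth));
			occlusionSum += getOcclusion(p, n, sampleViewPosition) * falloff;

		}

	}

	return occlusionSum / SAMPLES_FLOAT;

}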

var SSAOEffect = function (_Effect) {
_inherits(SSAOEffect, _Effect);
@@ -3691,7 +3691,7 @@
_this = _possibleConstructorReturn(this, _getPrototypeOf(SSAOEffect).call(this, "SSAOEffect", fragment$s, {
attributes: EffectAttribute.DEPTH,
blendFunction: settings.blendFunction,
defines: new Map([["RINGS", "0"], ["SAMPLES", "0"]]),
defines: new Map([["RINGS_INT", "0"], ["SAMPLES_INT", "0"], ["SAMPLES_FLOAT", "0.0"]]),
uniforms: new Map([["normalBuffer", new three.Uniform(normalBuffer)], ["cameraInverseProjectionMatrix", new three.Uniform(new three.Matrix4())], ["cameraProjectionMatrix", new three.Uniform(new three.Matrix4())], ["radiusStep", new three.Uniform(new three.Vector2())], ["distanceCutoff", new three.Uniform(new three.Vector2())], ["proximityCutoff", new three.Uniform(new three.Vector2())], ["seed", new three.Uniform(Math.random())], ["luminanceInfluence", new three.Uniform(settings.luminanceInfluence)], ["scale", new three.Uniform(settings.scale)], ["bias", new three.Uniform(settings.bias)]])
}));
_this.r = 0.0;
@@ -3740,22 +3740,23 @@
}, {
key: "samples",
get: function get() {
return Number.parseInt(this.defines.get("SAMPLES"));
return Number.parseInt(this.defines.get("SAMPLES_INT"));
},
set: function set(value) {
value = Math.floor(value);
this.defines.set("SAMPLES", value.toFixed(0));
this.defines.set("SAMPLES_INT", value.toFixed(0));
this.defines.set("SAMPLES_FLOAT", value.toFixed(1));
this.updateAngleStep();
this.updateRadiusStep();
}
}, {
key: "rings",
get: function get() {
return Number.parseInt(this.defines.get("RINGS"));
return Number.parseInt(this.defines.get("RINGS_INT"));
},
set: function set(value) {
value = Math.floor(value);
this.defines.set("RINGS", value.toFixed(0));
this.defines.set("RINGS_INT", value.toFixed(0));
this.updateAngleStep();
}
}, {
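A minimal usage sketch, not part of this commit and assuming the SSAOEffect constructor takes a camera, a normal buffer texture and an options object: the macro renames are internal, so the effect is still configured through its samples and rings accessors, which now write the new define names shown above.

import { SSAOEffect } from "postprocessing";

// camera and normalBuffer are assumed to exist in the surrounding setup.
const effect = new SSAOEffect(camera, normalBuffer, { samples: 11, rings: 4 });

// The setter floors the value and updates both defines:
// "SAMPLES_INT" becomes "16" and "SAMPLES_FLOAT" becomes "16.0".
effect.samples = 16;

// The getter parses the integer define back into a number.
console.log(effect.samples); // 16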
4 changes: 2 additions & 2 deletions build/postprocessing.min.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "postprocessing",
"version": "5.2.0",
"version": "5.2.1",
"description": "A post processing library that provides the means to implement image filter effects for three.js.",
"homepage": "https://github.com/vanruesc/postprocessing",
"main": "build/postprocessing.js",
13 changes: 7 additions & 6 deletions public/demo/index.js

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion public/demo/index.min.js

Large diffs are not rendered by default.

0 comments on commit 6135c58
