Depth of Field shader for points/strokes in Processing - glsl

Recently I've been using the Depth of Field shader below (originally from the ofxPostProcessing library for OpenFrameworks) for my Processing sketches.
depth.glsl
uniform float maxDepth;
void main() {
float depth = gl_FragCoord.z / gl_FragCoord.w;
gl_FragColor = vec4(vec3(1.0 - depth/maxDepth), 1.0);
}
dof.glsl
uniform sampler2D texture;
varying vec4 vertexture;
varying vec4 vertTexCoord;
uniform sampler2D tDepth;
uniform float maxBlur; // max blur amount
uniform float aperture; // aperture - bigger values for shallower depth of field
uniform float focus;
uniform float aspect;
void main() {
vec2 vUv = vertTexCoord.st;
vec2 aspectcorrect = vec2( 1.0, aspect );
vec4 depth1 = texture2D( tDepth, vUv );
float factor = depth1.x - focus;
vec2 dofblur = vec2 ( clamp( factor * aperture, -maxBlur, maxBlur ) );
vec2 dofblur9 = dofblur * 0.9;
vec2 dofblur7 = dofblur * 0.7;
vec2 dofblur4 = dofblur * 0.4;
vec4 col = vec4( 0.0 );
col += texture2D( texture, vUv.xy );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, 0.4 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.15, 0.37 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, 0.29 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.37, 0.15 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.40, 0.0 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.37, -0.15 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, -0.29 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.15, -0.37 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, -0.4 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.15, 0.37 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, 0.29 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.37, 0.15 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.4, 0.0 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.37, -0.15 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, -0.29 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.15, -0.37 ) * aspectcorrect ) * dofblur );
col += texture2D( texture, vUv.xy + ( vec2( 0.15, 0.37 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( -0.37, 0.15 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( 0.37, -0.15 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( -0.15, -0.37 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( -0.15, 0.37 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( 0.37, 0.15 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( -0.37, -0.15 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( 0.15, -0.37 ) * aspectcorrect ) * dofblur9 );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, 0.29 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( 0.40, 0.0 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, -0.29 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, -0.4 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, 0.29 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( -0.4, 0.0 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, -0.29 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, 0.4 ) * aspectcorrect ) * dofblur7 );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, 0.29 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( 0.4, 0.0 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( 0.29, -0.29 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, -0.4 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, 0.29 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( -0.4, 0.0 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( -0.29, -0.29 ) * aspectcorrect ) * dofblur4 );
col += texture2D( texture, vUv.xy + ( vec2( 0.0, 0.4 ) * aspectcorrect ) * dofblur4 );
gl_FragColor = col / 41.0;
gl_FragColor.a = 1.0;
}
The DOF effect works great with primitives like BOX or SPHERE (see the example sketch below) but, unfortunately, doesn't work when displaying POINT and STROKE primitives (every point gets the same amount of blur, no matter how far it is from the camera).
I think it has to do with the fact that points and strokes can't be controlled with a color shader, but I can't say for sure as I'm still new to shaders in general.
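(From what I've read, Processing renders POINT and LINE geometry with its own dedicated shader types, so a shader set on a buffer with shader() only replaces the fill/triangle pass and the points keep their default shader in the depth pass. I'm assuming, but haven't verified, that re-binding a depth shader to the point pass, e.g. with buf2.shader(pointDepthShader, POINTS), would be required; its fragment stage could reuse the same gl_FragCoord-based logic:)
pointDepth.glsl (hypothetical)
// Same depth logic as depth.glsl, but meant to be bound to the point-geometry pass.
uniform float maxDepth;
void main() {
    // eye-space depth approximation, identical to depth.glsl
    float depth = gl_FragCoord.z / gl_FragCoord.w;
    gl_FragColor = vec4(vec3(1.0 - depth / maxDepth), 1.0);
}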
Questions:
Is it possible to tweak the shaders above in order to get them working with points/strokes, or do I have to use/write a totally different Depth of Field shader for these specific primitives?
If a simple tweak is possible, could you help me figure out what I have to add/modify in order to get it working with points?
Any suggestion would be greatly appreciated.
Thank you
Example sketch (Python mode + PeasyCam library needed)
add_library('peasycam')
colors, liste = [[0,189,202], [251,183,0], [255,17,79], [252,128,35], [0,108,254]], []
def setup():
    global depthShader, dofShader, cam, buf1, buf2, buf3, pnt
    size(900, 900, P3D)
    frameRate(1000)
    cam = PeasyCam(this, 900)
    cam.setMaximumDistance(width)
    pnt = createShape(BOX, 30)
    pnt.setStroke(False)
    depthShader, dofShader = loadShader("depth.glsl"), loadShader("dof.glsl")
    depthShader.set("maxDepth", cam.getDistance()*2)
    dofShader.set("aspect", width / float(height)), dofShader.set("maxBlur", 0.02), dofShader.set("aperture", 0.06)
    buf1, buf2, buf3 = [createGraphics(width, height, P3D) for e in range(3)]
    buf1.smooth(8), buf2.shader(depthShader), buf3.shader(dofShader)
    for e in range(300): liste.append(PVector(random(width), random(height), random(width)))

def drawScene(pg):
    pg.beginDraw()
    pg.background(0)
    for i in range(len(liste)):
        pg.pushMatrix()
        pg.translate(liste[i].x-width/2, liste[i].y-width/2, liste[i].z-width/2)
        pg.shape(pnt)
        pnt.setFill(color(colors[i%5][0], colors[i%5][1], colors[i%5][2]))
        pg.popMatrix()
    pg.endDraw()
    cam.getState().apply(pg)

def draw():
    drawScene(buf1)
    drawScene(buf2)
    buf3.beginDraw()
    dofShader.set("tDepth", buf2)
    dofShader.set("focus", map(mouseX, 0, width, .3, 1))
    buf3.image(buf1, 0, 0)
    buf3.endDraw()
    cam.beginHUD()
    image(buf3, 0, 0)
    cam.endHUD()

Related

How to do a float to RGBA and back round trip in GLSL (WebGL GLSL ES 1.0)? [duplicate]

Trying to understand the many issues related to the WebGL development for a generic mobile target, now I need to store depth information in a texture attachment for later retrieval and post-processing.
JavaScript:
var depthRB = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRB);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, w, h);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, depthRB);
gl.bindRenderbuffer(gl.RENDERBUFFER, null);
var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, w, h, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
Vertex shader:
precision mediump float;
uniform mat4 u_transformMatrix;
attribute vec3 a_position;
varying float v_depth;
void main() {
vec4 tcoords = u_transformMatrix * vec4(a_position, 1.0);
v_depth = 0.5 * (tcoords.z + 1.0);
gl_Position = tcoords;
}
Fragment shader:
precision mediump float;
varying float v_depth;
vec4 PackDepth(in float frag_depth) {
vec4 bitSh = vec4(256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0);
vec4 bitMsk = vec4(0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0);
vec4 enc = fract(frag_depth * bitSh);
enc -= enc.xxyz * bitMsk;
return enc;
}
float UnpackDepth( const in vec4 enc ) {
const vec4 bit_shift = vec4( 1.0 / ( 256.0 * 256.0 * 256.0 ), 1.0 / ( 256.0 * 256.0 ), 1.0 / 256.0, 1.0 );
float decoded = dot( enc, bit_shift );
return decoded;
}
void main() {
vec4 encoded_depth;
float decoded_depth;
encoded_depth = PackDepth(v_depth);
decoded_depth = UnpackDepth(encoded_depth);
//gl_FragColor = vec4(vec3(decoded_depth), 1.0);
gl_FragColor = encoded_depth;
}
This is what I get now: left: iPad Pro/Android/desktop Chrome with --emulate-shader-precision, middle: desktop FF/Chrome (no flags), right: encoded and decoded (obviously as a 256-tone gray-scale)
I tried many different methods for packing/unpacking, but none of them seems to work.
Any advice about what I am doing wrong?
Moreover, I also noticed that many examples from the most common WebGL libraries which use an RGBA texture to store depth information are broken - I believe for the same reason: an issue somewhere in the pack/unpack functions.
EDIT: same issue in Three.js: https://github.com/mrdoob/three.js/issues/9092
Interesting thing, if I use the old mod approach to packing depth, I
get a bunch more precision (at least a couple more bits)
What is the correct approach to store and retrieve depth information using mediump precision?
The floating-point precision of a variable with the precision qualifier mediump is only guaranteed to be 10 bits.
See OpenGL ES Shading Language 1.00 Specification - 4.5.2 Precision Qualifiers, page 33.
The required minimum ranges and precisions for the precision qualifiers are:
highp float: range (-2^62, 2^62), relative precision 2^-16
mediump float: range (-2^14, 2^14), relative precision 2^-10
lowp float: range (-2, 2), absolute precision 2^-8
For this reason, only the two highest bytes of the encoded depth have any meaning. The algorithm stores the highest byte in the alpha channel and the second highest byte in the blue color channel. As a result, an RGB view of the encoded depth may look arbitrary.
Further, the algorithm overflows for a depth of 1.0: fract(1.0 * bitSh) is vec4(0.0), so a depth of 1.0 is encoded as a completely black color, but black decodes back to 0.0.
An algorithm which encodes a depth value in the range [0.0, 1.0] into 16 bits (two 8-bit channels) may look like this (using the R and G color channels):
vec2 PackDepth16( in float depth )
{
float depthVal = depth * (256.0*256.0 - 1.0) / (256.0*256.0);
vec3 encode = fract( depthVal * vec3(1.0, 256.0, 256.0*256.0) );
return encode.xy - encode.yz / 256.0 + 1.0/512.0;
}
float UnpackDepth16( in vec2 pack )
{
float depth = dot( pack, 1.0 / vec2(1.0, 256.0) );
return depth * (256.0*256.0) / (256.0*256.0 - 1.0);
}
This algorithm can be extended to 24 bits or 32 bits:
vec3 PackDepth24( in float depth )
{
float depthVal = depth * (256.0*256.0*256.0 - 1.0) / (256.0*256.0*256.0);
vec4 encode = fract( depthVal * vec4(1.0, 256.0, 256.0*256.0, 256.0*256.0*256.0) );
return encode.xyz - encode.yzw / 256.0 + 1.0/512.0;
}
float UnpackDepth24( in vec3 pack )
{
float depth = dot( pack, 1.0 / vec3(1.0, 256.0, 256.0*256.0) );
return depth * (256.0*256.0*256.0) / (256.0*256.0*256.0 - 1.0);
}
vec4 PackDepth32( in float depth )
{
depth *= (256.0*256.0*256.0 - 1.0) / (256.0*256.0*256.0);
vec4 encode = fract( depth * vec4(1.0, 256.0, 256.0*256.0, 256.0*256.0*256.0) );
return vec4( encode.xyz - encode.yzw / 256.0, encode.w ) + 1.0/512.0;
}
float UnpackDepth32( in vec4 pack )
{
float depth = dot( pack, 1.0 / vec4(1.0, 256.0, 256.0*256.0, 256.0*256.0*256.0) );
return depth * (256.0*256.0*256.0) / (256.0*256.0*256.0 - 1.0);
}
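For completeness, here is a small usage sketch of the round trip with the corrected 32-bit functions (the varying name v_depth is taken from the question; the function bodies are the ones defined above, repeated here only as prototypes):
precision mediump float;
varying float v_depth;                       // depth in [0.0, 1.0], as in the question

vec4  PackDepth32( in float depth );         // prototypes - definitions as above
float UnpackDepth32( in vec4 pack );

void main() {
    vec4  encoded = PackDepth32( v_depth );
    float decoded = UnpackDepth32( encoded );
    //gl_FragColor = encoded;                    // what would be written to the RGBA target
    gl_FragColor = vec4( vec3( decoded ), 1.0 ); // gray-scale view of the decoded depth
}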
See the code snippet below, which compares the algorithm from this answer (top half) with the algorithm from the question (bottom half):
(function onLoad() {
// shader program object
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList, uniformNames ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.unifomLocation = {};
for ( var i_n = 0; i_n < uniformNames.length; ++ i_n ) {
var name = uniformNames[i_n];
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript) {
source = "";
var node = shaderScript.firstChild;
while (node) {
if (node.nodeType == 3) source += node.textContent;
node = node.nextSibling;
}
}
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
function drawScene(){
var canvas = document.getElementById( "ogl-canvas" );
var vp = [canvas.width, canvas.height];
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShaderProgram.Use( progDraw );
gl.enableVertexAttribArray( progDraw.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.drawArrays( gl.TRIANGLE_STRIP, 0, 4 );
gl.disableVertexAttribArray( progDraw.inPos );
}
var gl;
var prog;
var bufObj = {};
var canvas
function sceneStart() {
container = document.getElementById('container');
canvas = document.getElementById( "ogl-canvas");
resize();
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
progDraw = ShaderProgram.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
], [] );
progDraw.inPos = gl.getAttribLocation( progDraw, "inPos" );
if ( prog == 0 )
return;
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1, -1, 1, -1, -1, 1, 1, 1 ] ), gl.STATIC_DRAW );
window.onresize = resize;
setInterval(drawScene, 50);
}
function resize() {
canvas.width = window.innerWidth;
canvas.height = window.innerHeight;
}
sceneStart();
})();
<canvas id="ogl-canvas"></canvas>
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 vertPos;
void main()
{
vertPos = inPos;
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vertPos;
vec2 PackDepth16( in float depth )
{
float depthVal = depth * (256.0*256.0 - 1.0) / (256.0*256.0);
vec3 encode = fract( depthVal * vec3(1.0, 256.0, 256.0*256.0) );
return encode.xy - encode.yz / 256.0 + 1.0/512.0;
}
float UnpackDepth16( in vec2 pack )
{
float depth = dot( pack, 1.0 / vec2(1.0, 256.0) );
return depth * (256.0*256.0) / (256.0*256.0 - 1.0);
}
vec4 PackDepth32_orig(in float frag_depth) {
vec4 bitSh = vec4(256.0 * 256.0 * 256.0, 256.0 * 256.0, 256.0, 1.0);
vec4 bitMsk = vec4(0.0, 1.0 / 256.0, 1.0 / 256.0, 1.0 / 256.0);
vec4 enc = fract(frag_depth * bitSh);
enc -= enc.xxyz * bitMsk;
return enc;
}
float UnpackDepth32_orig( const in vec4 enc ) {
const vec4 bit_shift = vec4( 1.0 / ( 256.0 * 256.0 * 256.0 ), 1.0 / ( 256.0 * 256.0 ), 1.0 / 256.0, 1.0 );
float decoded = dot( enc, bit_shift );
return decoded;
}
void main()
{
float depthTest = clamp(vertPos.x + 0.5, 0.0, 1.0);
vec2 color1 = clamp(PackDepth16( depthTest ), 0.0, 1.0);
float depth1 = UnpackDepth16( color1 );
vec4 color2 = clamp(PackDepth32_orig( depthTest ), 0.0, 1.0);
float depth2 = UnpackDepth32_orig( color2 );
gl_FragColor = vec4( mix( vec3(depth1), vec3(depth2), step(vertPos.y, 0.0) ), 1.0 );
}
</script>

GLSL - Gaussian Blur Artifact Issue

First of all, due to requirement constraints, I am working with OpenGL 2.1 and GLSL 120. I have implemented a simple fragment shader that applies a two-pass (horizontal & vertical) Gaussian blur with n kernel weights obtained from Pascal's Triangle. For the image below, I have decided to use a kernel size of 32, just for funsies:
As observed, the edges of the filtered blob seem to have some strange artifacts, as well as a ringing effect. For reference, here's how I am applying the blur:
if(isHorizontal)
{
result += texture2D(tex, vec2( curFrag.x - 14.0 * xOff, curFrag.y )).rgba * 0.000000115484001;
result += texture2D(tex, vec2( curFrag.x - 13.0 * xOff, curFrag.y )).rgba * 0.00000115484001;
result += texture2D(tex, vec2( curFrag.x - 12.0 * xOff, curFrag.y )).rgba * 0.000008372590071;
result += texture2D(tex, vec2( curFrag.x - 11.0 * xOff, curFrag.y )).rgba * 0.0000468865044;
result += texture2D(tex, vec2( curFrag.x - 10.0 * xOff, curFrag.y )).rgba * 0.0002109892698;
result += texture2D(tex, vec2( curFrag.x - 9.0 * xOff, curFrag.y )).rgba * 0.0007836744306;
result += texture2D(tex, vec2( curFrag.x - 8.0 * xOff, curFrag.y )).rgba * 0.002448982596;
result += texture2D(tex, vec2( curFrag.x - 7.0 * xOff, curFrag.y )).rgba * 0.006530620255;
result += texture2D(tex, vec2( curFrag.x - 6.0 * xOff, curFrag.y )).rgba * 0.01502042659;
result += texture2D(tex, vec2( curFrag.x - 5.0 * xOff, curFrag.y )).rgba * 0.03004085317;
result += texture2D(tex, vec2( curFrag.x - 4.0 * xOff, curFrag.y )).rgba * 0.05257149305;
result += texture2D(tex, vec2( curFrag.x - 3.0 * xOff, curFrag.y )).rgba * 0.08087922008;
result += texture2D(tex, vec2( curFrag.x - 2.0 * xOff, curFrag.y )).rgba * 0.1097646558;
result += texture2D(tex, vec2( curFrag.x - 1.0 * xOff, curFrag.y )).rgba * 0.131717587;
result += texture2D(tex, curFrag).rgba * 0.1399499362;
result += texture2D(tex, vec2( curFrag.x + 1.0 * xOff, curFrag.y )).rgba * 0.131717587;
result += texture2D(tex, vec2( curFrag.x + 2.0 * xOff, curFrag.y )).rgba * 0.1097646558;
result += texture2D(tex, vec2( curFrag.x + 3.0 * xOff, curFrag.y )).rgba * 0.08087922008;
result += texture2D(tex, vec2( curFrag.x + 4.0 * xOff, curFrag.y )).rgba * 0.05257149305;
result += texture2D(tex, vec2( curFrag.x + 5.0 * xOff, curFrag.y )).rgba * 0.03004085317;
result += texture2D(tex, vec2( curFrag.x + 6.0 * xOff, curFrag.y )).rgba * 0.01502042659;
result += texture2D(tex, vec2( curFrag.x + 7.0 * xOff, curFrag.y )).rgba * 0.006530620255;
result += texture2D(tex, vec2( curFrag.x + 8.0 * xOff, curFrag.y )).rgba * 0.002448982596;
result += texture2D(tex, vec2( curFrag.x + 9.0 * xOff, curFrag.y )).rgba * 0.0007836744306;
result += texture2D(tex, vec2( curFrag.x + 10.0 * xOff, curFrag.y )).rgba * 0.0002109892698;
result += texture2D(tex, vec2( curFrag.x + 11.0 * xOff, curFrag.y )).rgba * 0.0000468865044;
result += texture2D(tex, vec2( curFrag.x + 12.0 * xOff, curFrag.y )).rgba * 0.000008372590071;
result += texture2D(tex, vec2( curFrag.x + 13.0 * xOff, curFrag.y )).rgba * 0.00000115484001;
result += texture2D(tex, vec2( curFrag.x + 14.0 * xOff, curFrag.y )).rgba * 0.000000115484001;
}
else
{
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 14.0 * yOff )).rgba * 0.000000115484001;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 13.0 * yOff )).rgba * 0.00000115484001;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 12.0 * yOff )).rgba * 0.000008372590071;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 11.0 * yOff )).rgba * 0.0000468865044;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 10.0 * yOff )).rgba * 0.0002109892698;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 9.0 * yOff )).rgba * 0.0007836744306;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 8.0 * yOff )).rgba * 0.002448982596;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 7.0 * yOff )).rgba * 0.006530620255;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 6.0 * yOff )).rgba * 0.01502042659;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 5.0 * yOff )).rgba * 0.03004085317;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 4.0 * yOff )).rgba * 0.05257149305;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 3.0 * yOff )).rgba * 0.08087922008;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 2.0 * yOff )).rgba * 0.1097646558;
result += texture2D(tex, vec2( curFrag.x, curFrag.y - 1.0 * yOff )).rgba * 0.131717587;
result += texture2D(tex, curFrag).rgba * 0.1399499362;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 1.0 * yOff )).rgba * 0.131717587;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 2.0 * yOff )).rgba * 0.1097646558;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 3.0 * yOff )).rgba * 0.08087922008;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 4.0 * yOff )).rgba * 0.05257149305;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 5.0 * yOff )).rgba * 0.03004085317;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 6.0 * yOff )).rgba * 0.01502042659;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 7.0 * yOff )).rgba * 0.006530620255;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 8.0 * yOff )).rgba * 0.002448982596;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 9.0 * yOff )).rgba * 0.0007836744306;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 10.0 * yOff )).rgba * 0.0002109892698;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 11.0 * yOff )).rgba * 0.0000468865044;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 12.0 * yOff )).rgba * 0.000008372590071;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 13.0 * yOff )).rgba * 0.00000115484001;
result += texture2D(tex, vec2( curFrag.x, curFrag.y + 14.0 * yOff )).rgba * 0.000000115484001;
}
Furthermore, I am using two framebuffers. First, I draw the white blob onto a texture bound to the first framebuffer, then I apply my blur shader onto the second framebuffer for a horizontal pass, then back to the first one for a vertical pass. I have implemented a slider that repeats this process as well, see snippet below:
glUseProgram(gauss_blur_frag);
glUniform1f(glGetUniformLocation(gauss_blur_frag, "offset"), (float)radius);
glUniform2f(glGetUniformLocation(gauss_blur_frag, "resolution"), (float)fboWidth, (float)fboHeight);
for(int i = 1; i < smoothAmount; i++)
{
glUniform1i(glGetUniformLocation(gauss_blur_frag, "isHorizontal"), true);
drawTexOnFBO(secondFBO, firstFBO->texId, bounds);
glUniform1i(glGetUniformLocation(gauss_blur_frag, "isHorizontal"), false);
drawTexOnFBO(firstFBO, secondFBO->texId, bounds);
}
The banding/ringing/artifacts get more pronounced as I increase the offset/radius of my blur, as well as increase the number of times the for-loop runs. The aim for this exercise is to simply apply a 'softening' effect on the edges of the blob, without the kernels being visible while being able to manipulate the offset. Can anyone shed some light on this issue? Thank you.
The banding/ringing/artifacts get more pronounced as I increase the offset/radius of my blur.
Of course. If you increase the radius, then you increase the distance between the sample points, but you don't increase the number of sample points itself.
This leaves gaps between the texels that the 32 samples look up, so you don't take the whole information of the source texture into account.
Note that for a large radius, 2 adjacent points in the target texture use completely different texels from the source texture when they are processed. This causes the banding and artifacts.
In general, this effect can be reduced by using bilinear texture filtering (GL_LINEAR).
For a completely smooth and artifact-free blur, you have to increase the number of samples. But this will reduce performance quickly.
See also Fast Gaussian blur at pause.
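As a rough illustration of that idea (a sketch only, not the snippet used below; it assumes GLSL 1.20, a source texture filtered with GL_LINEAR, and hypothetical uniforms u_texelSize, u_dir and u_radius), a separable blur whose tap count grows with the radius could look like this:
varying vec2 vUV;                              // texture coordinate of the fragment
uniform sampler2D u_texture;
uniform vec2  u_texelSize;                     // 1.0 / texture resolution
uniform vec2  u_dir;                           // (1,0) for the horizontal pass, (0,1) for the vertical pass
uniform float u_radius;                        // blur radius in texels

void main()
{
    const int MAX_TAPS = 64;                   // constant loop bound, one tap per texel per side
    float sigma = max( u_radius * 0.5, 0.001 );
    vec4  sum   = texture2D( u_texture, vUV );
    float wsum  = 1.0;
    for ( int i = 1; i <= MAX_TAPS; ++ i )
    {
        if ( float(i) > u_radius )             // stop once the radius is covered
            break;
        float weight = exp( -float(i*i) / (2.0 * sigma * sigma) );
        vec2  offset = u_dir * u_texelSize * float(i);
        sum  += ( texture2D( u_texture, vUV + offset )
                + texture2D( u_texture, vUV - offset ) ) * weight;
        wsum += 2.0 * weight;
    }
    gl_FragColor = sum / wsum;                 // normalize so the weights sum to 1
}
The step between taps stays at one texel, so increasing u_radius adds taps instead of spreading a fixed set of 32 taps further apart.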
See the example, where the effect can be reproduced by decreasing the blur factor and increasing the radius:
var readInput = true;
function changeEventHandler(event){
readInput = true;
}
(function loadscene() {
var resize, gl, progDraw, progBlur, vp_size, blurFB;
var bufCube = {};
var bufQuad = {};
var shininess = 10.0;
var glow = 10.0;
var sigma = 0.8;
var radius = 1.0;
function render(delteMS){
if ( readInput ) {
//readInput = false;
var sliderScale = 100;
sigma = document.getElementById( "sigma" ).value / sliderScale;
radius = document.getElementById( "radius" ).value / sliderScale;
}
Camera.create();
Camera.vp = vp_size;
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up framebuffer
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[0] );
gl.viewport( 0, 0, blurFB[0].width, blurFB[0].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up draw shader
ShaderProgram.Use( progDraw.prog );
ShaderProgram.SetUniformM44( progDraw.prog, "u_projectionMat44", Camera.Perspective() );
var viewMat = Camera.LookAt();
//viewMat = RotateAxis( viewMat, CalcAng( delteMS, 13.0 ), 0 );
//viewMat = RotateAxis( viewMat, CalcAng( delteMS, 17.0 ), 1 );
ShaderProgram.SetUniformM44( progDraw.prog, "u_modelViewMat44", viewMat );
ShaderProgram.SetUniformF1( progDraw.prog, "u_shininess", shininess );
// draw scene
VertexBuffer.Draw( bufCube );
// set blur-X framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[1] );
gl.viewport( 0, 0, blurFB[1].width, blurFB[1].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
var texUnit = 1;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[0].color0_texture );
// set up blur-X shader
ShaderProgram.Use( progBlur.prog );
ShaderProgram.SetUniformI1( progBlur.prog, "u_texture", texUnit )
ShaderProgram.SetUniformF2( progBlur.prog, "u_textureSize", vp_size );
ShaderProgram.SetUniformF1( progBlur.prog, "u_sigma", sigma )
ShaderProgram.SetUniformF1( progBlur.prog, "u_radius", radius )
ShaderProgram.SetUniformF2( progBlur.prog, "u_dir", [1.0, 0.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
// reset framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texUnit = 2;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[1].color0_texture );
// set up pst process shader
ShaderProgram.SetUniformI1( progBlur.prog, "u_texture", texUnit )
ShaderProgram.SetUniformF2( progBlur.prog, "u_dir", [0.0, 1.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // nearest power of 2
blurFB = [];
for ( var i = 0; i < 2; ++ i ) {
fb = gl.createFramebuffer();
fb.width = fbsize;
fb.height = fbsize;
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
blurFB.push( fb );
}
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return null;
progDraw = {}
progDraw.prog = ShaderProgram.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.prog )
return null;
progDraw.inPos = gl.getAttribLocation( progDraw.prog, "inPos" );
progDraw.inNV = gl.getAttribLocation( progDraw.prog, "inNV" );
progDraw.inCol = gl.getAttribLocation( progDraw.prog, "inCol" );
progBlur = {}
progBlur.prog = ShaderProgram.Create(
[ { source : "post-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "blur-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progBlur.inPos = gl.getAttribLocation( progBlur.prog, "inPos" );
if ( !progBlur.prog )
return;
// create cube
var cubePos = [
-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0 ];
var cubeCol = [ 1.0, 0.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 ];
var cubeHlpInx = [ 0, 1, 2, 3, 1, 5, 6, 2, 5, 4, 7, 6, 4, 0, 3, 7, 3, 2, 6, 7, 1, 0, 4, 5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) {
cubePosData.push( cubePos[cubeHlpInx[i]*3], cubePos[cubeHlpInx[i]*3+1], cubePos[cubeHlpInx[i]*3+2] );
}
var cubeNVData = [];
for ( var i1 = 0; i1 < cubeHlpInx.length; i1 += 4 ) {
var nv = [0, 0, 0];
for ( i2 = 0; i2 < 4; ++ i2 ) {
var i = i1 + i2;
nv[0] += cubePosData[i*3]; nv[1] += cubePosData[i*3+1]; nv[2] += cubePosData[i*3+2];
}
for ( i2 = 0; i2 < 4; ++ i2 )
cubeNVData.push( nv[0], nv[1], nv[2] );
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) {
cubeColData.push( cubeCol[is*3], cubeCol[is*3+1], cubeCol[is*3+2] );
}
}
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) {
cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
}
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol } ],
cubeInxData );
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, intervall ) {
return Fract( deltaTime / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, intervall, range ) {
var pos = self.Fract( deltaTime / (1000*intervall) ) * 2.0
var pos = pos < 1.0 ? pos : (2.0-pos)
return range[0] + (range[1] - range[0]) * pos;
}
function EllipticalPosition( a, b, angRag ) {
var a_b = a * a - b * b
var ea = (a_b <= 0) ? 0 : Math.sqrt( a_b );
var eb = (a_b >= 0) ? 0 : Math.sqrt( -a_b );
return [ a * Math.sin( angRag ) - ea, b * Math.cos( angRag ) - eb, 0 ];
}
function IdentityMat44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
var Camera = {};
Camera.create = function() {
this.pos = [0, 3, 0.0];
this.target = [0, 0, 0];
this.up = [0, 0, 1];
this.fov_y = 90;
this.vp = [800, 600];
this.near = 0.5;
this.far = 100.0;
}
Camera.Perspective = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
var m = IdentityMat44();
m[0] = t/r; m[1] = 0; m[2] = 0; m[3] = 0;
m[4] = 0; m[5] = t; m[6] = 0; m[7] = 0;
m[8] = 0; m[9] = 0; m[10] = -fn / f_n; m[11] = -1;
m[12] = 0; m[13] = 0; m[14] = -2 * this.far * this.near / f_n; m[15] = 0;
return m;
}
Camera.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
var m = IdentityMat44();
m[0] = mx[0]; m[1] = my[0]; m[2] = mz[0]; m[3] = 0;
m[4] = mx[1]; m[5] = my[1]; m[6] = mz[1]; m[7] = 0;
m[8] = mx[2]; m[9] = my[2]; m[10] = mz[2]; m[11] = 0;
m[12] = tx; m[13] = ty; m[14] = tz; m[15] = 1;
return m;
}
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.attribIndex = {};
var noOfAttributes = gl.getProgramParameter( progObj, gl.ACTIVE_ATTRIBUTES );
for ( var i_n = 0; i_n < noOfAttributes; ++ i_n ) {
var name = gl.getActiveAttrib( progObj, i_n ).name;
progObj.attribIndex[name] = gl.getAttribLocation( progObj, name );
}
progObj.unifomLocation = {};
var noOfUniforms = gl.getProgramParameter( progObj, gl.ACTIVE_UNIFORMS );
for ( var i_n = 0; i_n < noOfUniforms; ++ i_n ) {
var name = gl.getActiveUniform( progObj, i_n ).name;
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.AttributeIndex = function( progObj, name ) { return progObj.attribIndex[name]; }
ShaderProgram.UniformLocation = function( progObj, name ) { return progObj.unifomLocation[name]; }
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.SetUniformI1 = function( progObj, name, val ) { if(progObj.unifomLocation[name]) gl.uniform1i( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniformF1 = function( progObj, name, val ) { if(progObj.unifomLocation[name]) gl.uniform1f( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniformF2 = function( progObj, name, arr ) { if(progObj.unifomLocation[name]) gl.uniform2fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformF3 = function( progObj, name, arr ) { if(progObj.unifomLocation[name]) gl.uniform3fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformF4 = function( progObj, name, arr ) { if(progObj.unifomLocation[name]) gl.uniform4fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformM33 = function( progObj, name, mat ) { if(progObj.unifomLocation[name]) gl.uniformMatrix3fv( progObj.unifomLocation[name], false, mat ); }
ShaderProgram.SetUniformM44 = function( progObj, name, mat ) { if(progObj.unifomLocation[name]) gl.uniformMatrix4fv( progObj.unifomLocation[name], false, mat ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : null;
}
var VertexBuffer = {};
VertexBuffer.Create = function( attributes, indices ) {
var buffer = {};
buffer.buf = [];
buffer.attr = []
for ( var i = 0; i < attributes.length; ++ i ) {
buffer.buf.push( gl.createBuffer() );
buffer.attr.push( { size : attributes[i].attrSize, loc : attributes[i].attrLoc } );
gl.bindBuffer( gl.ARRAY_BUFFER, buffer.buf[i] );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( attributes[i].data ), gl.STATIC_DRAW );
}
buffer.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, buffer.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW );
buffer.inxLen = indices.length;
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
return buffer;
}
VertexBuffer.Draw = function( bufObj ) {
for ( var i = 0; i < bufObj.buf.length; ++ i ) {
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.buf[i] );
gl.vertexAttribPointer( bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0 );
gl.enableVertexAttribArray( bufObj.attr[i].loc );
}
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.drawElements( gl.TRIANGLES, bufObj.inxLen, gl.UNSIGNED_SHORT, 0 );
for ( var i = 0; i < bufObj.buf.length; ++ i )
gl.disableVertexAttribArray( bufObj.attr[i].loc );
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
}
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform mat4 u_projectionMat44;
uniform mat4 u_modelViewMat44;
void main()
{
vertNV = mat3( u_modelViewMat44 ) * normalize( inNV );
vertCol = inCol;
vec4 pos = u_modelViewMat44 * vec4( inPos, 1.0 );
vertPos = pos.xyz / pos.w;
gl_Position = u_projectionMat44 * pos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform float u_shininess;
void main()
{
vec3 color = vertCol;
vec3 normalV = normalize( vertNV );
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + normalV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float shineFac = ( u_shininess + 2.0 ) * pow( NdotH, u_shininess ) / ( 2.0 * 3.14159265 );
gl_FragColor = vec4( color.rgb * (0.2 + NdotH), 1.0 );
}
</script>
<script id="post-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="blur-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform float u_radius;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14157 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
</script>
<div>
<form id="gui" name="inputs">
<table>
<tr> <td> <font color= #CCF>radius</font> </td>
<td> <input type="range" id="radius" min="1" max="1000" value="1000" onchange="changeEventHandler(event);"/></td> </tr>
<tr> <td> <font color= #CCF>blur</font> </td>
<td> <input type="range" id="sigma" min="1" max="100" value="5" onchange="changeEventHandler(event);"/></td> </tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;"></canvas>

Creating a Gradient Color in Fragment Shader

I'm trying to create a gradient color the way design apps (Photoshop, for example) do, but I can't get the exact result I want.
My shader creates very nice 'gradients', but it also contains other colors that differ from the two colors I want to blend between.
It looks nice, but my aim is to add blending functions later and build a kind of color-correction shader; first of all, though, I have to get the right colors.
Here is my fragment shader
http://player.thebookofshaders.com/?log=171119111216
uniform vec2 u_resolution;
void main() {
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 color1 = vec3(1.9,0.55,0);
vec3 color2 = vec3(0.226,0.000,0.615);
float mixValue = distance(st,vec2(0,1));
vec3 color = mix(color1,color2,mixValue);
gl_FragColor = vec4(color,mixValue);
}
And here is the result:
Thanks in advance.
In response to just the title of your question you might also want to consider doing the mix in other color spaces. Your code is mixing in RGB space but you'll get different results in different spaces.
Example
const gl = document.createElement("canvas").getContext("webgl");
gl.canvas.width = 100;
gl.canvas.height = 100;
gl.viewport(0, 0, 100, 100);
const vsrc = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
const fRGB = `
precision mediump float;
uniform vec3 color1;
uniform vec3 color2;
void main() {
vec2 st = gl_PointCoord;
float mixValue = distance(st, vec2(0, 1));
vec3 color = mix(color1, color2, mixValue);
gl_FragColor = vec4(color, 1);
}
`;
const fHSV = `
precision mediump float;
uniform vec3 color1;
uniform vec3 color2;
// from: http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl
vec3 rgb2hsv(vec3 c) {
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c) {
c = vec3(c.x, clamp(c.yz, 0.0, 1.0));
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main() {
vec2 st = gl_PointCoord;
float mixValue = distance(st, vec2(0, 1));
vec3 hsv1 = rgb2hsv(color1);
vec3 hsv2 = rgb2hsv(color2);
// mix hue in toward closest direction
float hue = (mod(mod((hsv2.x - hsv1.x), 1.) + 1.5, 1.) - 0.5) * mixValue + hsv1.x;
vec3 hsv = vec3(hue, mix(hsv1.yz, hsv2.yz, mixValue));
vec3 color = hsv2rgb(hsv);
gl_FragColor = vec4(color, 1);
}
`;
const fHSL = `
precision mediump float;
uniform vec3 color1;
uniform vec3 color2;
const float Epsilon = 1e-10;
vec3 rgb2hcv(in vec3 RGB)
{
// Based on work by Sam Hocevar and Emil Persson
vec4 P = mix(vec4(RGB.bg, -1.0, 2.0/3.0), vec4(RGB.gb, 0.0, -1.0/3.0), step(RGB.b, RGB.g));
vec4 Q = mix(vec4(P.xyw, RGB.r), vec4(RGB.r, P.yzx), step(P.x, RGB.r));
float C = Q.x - min(Q.w, Q.y);
float H = abs((Q.w - Q.y) / (6. * C + Epsilon) + Q.z);
return vec3(H, C, Q.x);
}
vec3 rgb2hsl(in vec3 RGB)
{
vec3 HCV = rgb2hcv(RGB);
float L = HCV.z - HCV.y * 0.5;
float S = HCV.y / (1.0 - abs(L * 2. - 1.) + Epsilon);
return vec3(HCV.x, S, L);
}
vec3 hsl2rgb(vec3 c)
{
c = vec3(fract(c.x), clamp(c.yz, 0.0, 1.0));
vec3 rgb = clamp(abs(mod(c.x * 6.0 + vec3(0.0, 4.0, 2.0), 6.0) - 3.0) - 1.0, 0.0, 1.0);
return c.z + c.y * (rgb - 0.5) * (1.0 - abs(2.0 * c.z - 1.0));
}
void main() {
vec2 st = gl_PointCoord;
float mixValue = distance(st, vec2(0, 1));
vec3 hsl1 = rgb2hsl(color1);
vec3 hsl2 = rgb2hsl(color2);
// mix hue in toward closest direction
float hue = (mod(mod((hsl2.x - hsl1.x), 1.) + 1.5, 1.) - 0.5) * mixValue + hsl1.x;
vec3 hsl = vec3(hue, mix(hsl1.yz, hsl2.yz, mixValue));
vec3 color = hsl2rgb(hsl);
gl_FragColor = vec4(color, 1);
}
`;
const fLAB = `
precision mediump float;
uniform vec3 color1;
uniform vec3 color2;
// from: https://code.google.com/archive/p/flowabs/source
vec3 rgb2xyz( vec3 c ) {
vec3 tmp;
tmp.x = ( c.r > 0.04045 ) ? pow( ( c.r + 0.055 ) / 1.055, 2.4 ) : c.r / 12.92;
tmp.y = ( c.g > 0.04045 ) ? pow( ( c.g + 0.055 ) / 1.055, 2.4 ) : c.g / 12.92,
tmp.z = ( c.b > 0.04045 ) ? pow( ( c.b + 0.055 ) / 1.055, 2.4 ) : c.b / 12.92;
return 100.0 * tmp *
mat3( 0.4124, 0.3576, 0.1805,
0.2126, 0.7152, 0.0722,
0.0193, 0.1192, 0.9505 );
}
vec3 xyz2lab( vec3 c ) {
vec3 n = c / vec3( 95.047, 100, 108.883 );
vec3 v;
v.x = ( n.x > 0.008856 ) ? pow( n.x, 1.0 / 3.0 ) : ( 7.787 * n.x ) + ( 16.0 / 116.0 );
v.y = ( n.y > 0.008856 ) ? pow( n.y, 1.0 / 3.0 ) : ( 7.787 * n.y ) + ( 16.0 / 116.0 );
v.z = ( n.z > 0.008856 ) ? pow( n.z, 1.0 / 3.0 ) : ( 7.787 * n.z ) + ( 16.0 / 116.0 );
return vec3(( 116.0 * v.y ) - 16.0, 500.0 * ( v.x - v.y ), 200.0 * ( v.y - v.z ));
}
vec3 rgb2lab(vec3 c) {
vec3 lab = xyz2lab( rgb2xyz( c ) );
return vec3( lab.x / 100.0, 0.5 + 0.5 * ( lab.y / 127.0 ), 0.5 + 0.5 * ( lab.z / 127.0 ));
}
vec3 lab2xyz( vec3 c ) {
float fy = ( c.x + 16.0 ) / 116.0;
float fx = c.y / 500.0 + fy;
float fz = fy - c.z / 200.0;
return vec3(
95.047 * (( fx > 0.206897 ) ? fx * fx * fx : ( fx - 16.0 / 116.0 ) / 7.787),
100.000 * (( fy > 0.206897 ) ? fy * fy * fy : ( fy - 16.0 / 116.0 ) / 7.787),
108.883 * (( fz > 0.206897 ) ? fz * fz * fz : ( fz - 16.0 / 116.0 ) / 7.787)
);
}
vec3 xyz2rgb( vec3 c ) {
vec3 v = c / 100.0 * mat3(
3.2406, -1.5372, -0.4986,
-0.9689, 1.8758, 0.0415,
0.0557, -0.2040, 1.0570
);
vec3 r;
r.x = ( v.r > 0.0031308 ) ? (( 1.055 * pow( v.r, ( 1.0 / 2.4 ))) - 0.055 ) : 12.92 * v.r;
r.y = ( v.g > 0.0031308 ) ? (( 1.055 * pow( v.g, ( 1.0 / 2.4 ))) - 0.055 ) : 12.92 * v.g;
r.z = ( v.b > 0.0031308 ) ? (( 1.055 * pow( v.b, ( 1.0 / 2.4 ))) - 0.055 ) : 12.92 * v.b;
return r;
}
vec3 lab2rgb(vec3 c) {
return xyz2rgb( lab2xyz( vec3(100.0 * c.x, 2.0 * 127.0 * (c.y - 0.5), 2.0 * 127.0 * (c.z - 0.5)) ) );
}
void main() {
vec2 st = gl_PointCoord;
float mixValue = distance(st, vec2(0, 1));
vec3 lab1 = rgb2lab(color1);
vec3 lab2 = rgb2lab(color2);
vec3 lab = mix(lab1, lab2, mixValue);
vec3 color = lab2rgb(lab);
gl_FragColor = vec4(color, 1);
}
`;
function draw(gl, shaders, color1, color2, label) {
const programInfo = twgl.createProgramInfo(gl, shaders);
gl.useProgram(programInfo.program);
twgl.setUniforms(programInfo, {
color1: color1,
color2: color2,
});
gl.drawArrays(gl.POINTS, 0, 1);
const div = document.createElement("div");
const img = new Image();
img.src = gl.canvas.toDataURL();
div.appendChild(img);
const inner = document.createElement("span");
inner.textContent = label;
div.appendChild(inner);
document.body.appendChild(div);
}
const color1 = [1.0, 0.55, 0];
const color2 = [0.226, 0.000, 0.615];
draw(gl, [vsrc, fRGB], color1, color2, "rgb");
draw(gl, [vsrc, fHSV], color1, color2, "hsv");
draw(gl, [vsrc, fHSV], color1, color2, "hsl");
draw(gl, [vsrc, fLAB], color1, color2, "lab");
img { border: 1px solid black; margin: 2px; }
span { display: block; }
div { display: inline-block; text-align: center; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
I would also suggest passing your colors in via a ramp texture - an Nx1 texture - and using
color = texture2D(
rampTexture,
vec2((mixValue * (rampWidth - 1.) + .5) / rampWidth, 0.5)).rgb;
Then you can easily blend across 2 colors, 3 colors, 20 colors. You can space out the colors as well by repeating colors etc..
Example:
const gl = document.createElement("canvas").getContext("webgl");
gl.canvas.width = 100;
gl.canvas.height = 100;
gl.viewport(0, 0, 100, 100);
const vsrc = `
void main() {
gl_PointSize = 100.0;
gl_Position = vec4(0, 0, 0, 1);
}
`;
const fsrc = `
precision mediump float;
uniform sampler2D rampTexture;
uniform float rampWidth;
void main() {
vec2 st = gl_PointCoord;
float mixValue = distance(st, vec2(0, 1));
vec3 color = texture2D(
rampTexture,
vec2((mixValue * (rampWidth - 1.) + .5) / rampWidth, 0.5)).rgb;
gl_FragColor = vec4(color, 1);
}
`;
const programInfo = twgl.createProgramInfo(gl, [vsrc, fsrc]);
gl.useProgram(programInfo.program);
const tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
function draw(gl, ramp, label) {
const width = ramp.length;
gl.bindTexture(gl.TEXTURE_2D, tex);
const level = 0;
const internalFormat = gl.RGB;
const height = 1;
const border = 0;
const format = gl.RGB;
const type = gl.UNSIGNED_BYTE;
const rampData = new Uint8Array([].concat(...ramp).map(v => v * 255));
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, rampData);
twgl.setUniforms(programInfo, {
rampTexture: tex,
rampWidth: width,
});
gl.drawArrays(gl.POINTS, 0, 1);
const div = document.createElement("div");
const img = new Image();
img.src = gl.canvas.toDataURL();
div.appendChild(img);
const inner = document.createElement("span");
inner.textContent = label;
div.appendChild(inner);
document.body.appendChild(div);
}
const color1 = [1.0, 0.55, 0];
const color2 = [0.226, 0.000, 0.615];
const r = [1, 0, 0];
const g = [0, 1, 0];
const b = [0, 0, 1];
const w = [1, 1, 1];
draw(gl, [color1, color2], "color1->color2");
draw(gl, [r, g], "red->green");
draw(gl, [r, g, b], "r->g->b");
draw(gl, [r, b, r, b, r], "r->b->r->b->r");
draw(gl, [g, b, b, b, g], "g->b->b->b->g");
img { border: 1px solid black; margin: 2px; }
span { display: block; }
div { display: inline-block; text-align: center; }
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
Note: a 1 dimensional 256x1 texture is how Chrome, Firefox, and Android render both linear and radial gradients. See src
There is a simple mistake when you set up the color value. You used 1.9 for the red color channel instead of 1.0 when you set up the orange color.
Change your code to:
vec3 color1 = vec3(1.0, 0.55, 0.0); // 1.0 instead of 1.9
Note, the final color channels are clamped to [0, 1], but since you use mix to interpolate the colors, a channel above 1.0 skews the gradient toward that color: mix(1.9, 0.226, 0.5), for example, is about 1.06, which still clamps to 1.0, so the orange end of the gradient extends further than intended.
Preview:
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.attribIndex = {};
var noOfAttributes = gl.getProgramParameter( progObj, gl.ACTIVE_ATTRIBUTES );
for ( var i_n = 0; i_n < noOfAttributes; ++ i_n ) {
var name = gl.getActiveAttrib( progObj, i_n ).name;
progObj.attribIndex[name] = gl.getAttribLocation( progObj, name );
}
progObj.unifomLocation = {};
var noOfUniforms = gl.getProgramParameter( progObj, gl.ACTIVE_UNIFORMS );
for ( var i_n = 0; i_n < noOfUniforms; ++ i_n ) {
var name = gl.getActiveUniform( progObj, i_n ).name;
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.AttributeIndex = function( progObj, name ) { return progObj.attribIndex[name]; }
ShaderProgram.UniformLocation = function( progObj, name ) { return progObj.unifomLocation[name]; }
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.SetUniformF2 = function( progObj, name, arr ) { if(progObj.unifomLocation[name]) gl.uniform2fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript) {
source = "";
var node = shaderScript.firstChild;
while (node) {
if (node.nodeType == 3) source += node.textContent;
node = node.nextSibling;
}
}
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
function drawScene(){
var canvas = document.getElementById( "ogl-canvas" );
var vp = [canvas.width, canvas.height];
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShaderProgram.Use( progDraw );
ShaderProgram.SetUniformF2( progDraw, "u_resolution", vp )
gl.enableVertexAttribArray( progDraw.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progDraw.inPos );
}
var gl;
var prog;
var bufObj = {};
function sceneStart() {
var canvas = document.getElementById( "ogl-canvas");
gl = canvas.getContext( "experimental-webgl", { premultipliedAlpha: true } );
if ( !gl )
return;
progDraw = ShaderProgram.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progDraw.inPos = gl.getAttribLocation( progDraw, "inPos" );
if ( prog == 0 )
return;
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
setInterval(drawScene, 50);
}
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 vertPos;
void main()
{
vertPos = inPos;
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vertPos;
uniform vec2 u_resolution;
void main()
{
vec2 st = gl_FragCoord.xy/u_resolution.xy;
vec3 color1 = vec3(1.0,0.55,0);
vec3 color2 = vec3(0.226,0.000,0.615);
float mixValue = distance(st,vec2(0,1));
vec3 color = mix(color1,color2,mixValue);
gl_FragColor = vec4(color,1.0);
}
</script>
<body onload="sceneStart();">
<canvas id="ogl-canvas" style="border: none;" width="256" height="256"></canvas>
</body>

GLSL spotlight projection volume

In my open source project I have set up a deferred rendering pipeline using Qt3D. So far so good, but now I'd like to move forward by adding spotlight projection volumes (e.g. as if there were smoke in the scene).
Like this:
The fragment shader I'm using is at the end of the question.
I've read that for each fragment I should do ray marching from the light position and find the intersections with a cone, but I have no idea how to translate this into GLSL.
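For what it's worth, the quadratic ray/cone intersection I found in math references would translate to something like the following untested sketch (apex P, normalized axis V, cosCutoff = cosine of the cut-off angle, ray origin O and normalized direction D):
// Returns true and the two ray parameters t0 <= t1 if the ray hits the cone.
// Degenerate cases (ray parallel to the surface, the mirror cone behind the
// apex) are ignored here and would need extra handling.
bool IntersectCone( in vec3 O, in vec3 D, in vec3 P, in vec3 V, in float cosCutoff,
                    out float t0, out float t1 )
{
    vec3  CO  = O - P;
    float c2  = cosCutoff * cosCutoff;
    float DV  = dot( D, V );
    float COV = dot( CO, V );
    float a = DV * DV - c2;
    float b = 2.0 * ( DV * COV - dot( D, CO ) * c2 );
    float c = COV * COV - dot( CO, CO ) * c2;
    float disc = b * b - 4.0 * a * c;
    if ( disc < 0.0 )
        return false;
    float sq = sqrt( disc );
    t0 = ( -b - sq ) / ( 2.0 * a );
    t1 = ( -b + sq ) / ( 2.0 * a );
    if ( t0 > t1 ) { float tmp = t0; t0 = t1; t1 = tmp; }
    return true;
}
The visible 'smoke' could then be approximated from the length of the ray segment between t0 and t1, clamped against the scene depth, and attenuated by the distance to the light - but I don't know how to put all of this together.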
I can easily add a uniform with the depth map (from camera point of view) coming from the GBuffer, but I don't know if that's of any help.
Since my GLSL knowledge is very limited, please reply with actual code, not a lengthy mathematical explanation that I won't be able to understand/translate into code. Please be patient with me.
uniform sampler2D color;
uniform sampler2D position;
uniform sampler2D normal;
uniform vec2 winSize;
out vec4 fragColor;
const int MAX_LIGHTS = 102;
const int TYPE_POINT = 0;
const int TYPE_DIRECTIONAL = 1;
const int TYPE_SPOT = 2;
struct Light {
int type;
vec3 position;
vec3 color;
float intensity;
vec3 direction;
float constantAttenuation;
float linearAttenuation;
float quadraticAttenuation;
float cutOffAngle;
};
uniform Light lightsArray[MAX_LIGHTS];
uniform int lightsNumber;
void main()
{
vec2 texCoord = gl_FragCoord.xy / winSize;
vec4 col = texture(color, texCoord);
vec3 pos = texture(position, texCoord).xyz;
vec3 norm = texture(normal, texCoord).xyz;
vec3 lightColor = vec3(0.0);
vec3 s;
float att;
for (int i = 0; i < lightsNumber; ++i) {
att = 1.0;
if ( lightsArray[i].type != TYPE_DIRECTIONAL ) {
s = lightsArray[i].position - pos;
if (lightsArray[i].constantAttenuation != 0.0
|| lightsArray[i].linearAttenuation != 0.0
|| lightsArray[i].quadraticAttenuation != 0.0) {
float dist = length(s);
att = 1.0 / (lightsArray[i].constantAttenuation + lightsArray[i].linearAttenuation * dist + lightsArray[i].quadraticAttenuation * dist * dist);
}
s = normalize( s );
if ( lightsArray[i].type == TYPE_SPOT ) {
if ( degrees(acos(dot(-s, normalize(lightsArray[i].direction))) ) > lightsArray[i].cutOffAngle)
att = 0.0;
}
} else {
s = normalize(-lightsArray[i].direction);
}
float diffuse = max( dot( s, norm ), 0.0 );
lightColor += att * lightsArray[i].intensity * diffuse * lightsArray[i].color;
}
fragColor = vec4(col.rgb * lightColor, col.a);
}
This is how a spotlight looks with the original shader above:
[EDIT - SOLVED] Thanks to Rabbid76's excellent answer and precious support.
This is the modified code to see the cone projection:
#version 140
uniform sampler2D color;
uniform sampler2D position;
uniform sampler2D normal;
uniform vec2 winSize;
out vec4 fragColor;
const int MAX_LIGHTS = 102;
const int TYPE_POINT = 0;
const int TYPE_DIRECTIONAL = 1;
const int TYPE_SPOT = 2;
struct Light {
int type;
vec3 position;
vec3 color;
float intensity;
vec3 direction;
float constantAttenuation;
float linearAttenuation;
float quadraticAttenuation;
float cutOffAngle;
};
uniform Light lightsArray[MAX_LIGHTS];
uniform int lightsNumber;
uniform mat4 inverseViewMatrix; // defined by camera position, camera target and up vector
void main()
{
vec2 texCoord = gl_FragCoord.xy / winSize;
vec4 col = texture(color, texCoord);
vec3 pos = texture(position, texCoord).xyz;
vec3 norm = texture(normal, texCoord).xyz;
vec3 lightColor = vec3(0.0);
vec3 s;
// calculate unprojected fragment position on near plane and line of sight relative to view
float nearZ = -1.0;
vec3 nearPos = vec3( (texCoord.x - 0.5) * winSize.x / winSize.y, texCoord.y - 0.5, nearZ ); // 1.0 is camera near
vec3 los = normalize( nearPos );
// ray definition
vec3 O = vec3( inverseViewMatrix * vec4( 0.0, 0.0, 0.0, 1.0 ) ); // translation part of the camera matrix, which is equal to the camera position
vec3 D = (length(pos) > 0.0) ? normalize(pos - O) : (mat3(inverseViewMatrix) * los);
for (int i = 0; i < lightsNumber; ++i)
{
float att = 1.0;
if ( lightsArray[i].type == TYPE_DIRECTIONAL )
{
s = normalize( -lightsArray[i].direction );
}
else
{
s = lightsArray[i].position - pos;
if (lightsArray[i].type != TYPE_SPOT
&& (lightsArray[i].constantAttenuation != 0.0
|| lightsArray[i].linearAttenuation != 0.0
|| lightsArray[i].quadraticAttenuation != 0.0))
{
float dist = length(s);
att = 1.0 / (lightsArray[i].constantAttenuation + lightsArray[i].linearAttenuation * dist + lightsArray[i].quadraticAttenuation * dist * dist);
}
s = normalize( s );
if ( lightsArray[i].type == TYPE_SPOT )
{
// cone definition
vec3 C = lightsArray[i].position;
vec3 V = normalize(lightsArray[i].direction);
float cosTh = cos( radians(lightsArray[i].cutOffAngle) );
// ray - cone intersection
vec3 CO = O - C;
float DdotV = dot( D, V );
float COdotV = dot( CO, V );
float a = DdotV * DdotV - cosTh * cosTh;
float b = 2.0 * (DdotV * COdotV - dot( D, CO ) * cosTh * cosTh);
float c = COdotV * COdotV - dot( CO, CO ) * cosTh * cosTh;
float det = b * b - 4.0 * a * c;
// find intersection
float isIsect = 0.0;
vec3 isectP = vec3(0.0);
if ( det >= 0.0 )
{
vec3 P1 = O + (-b - sqrt(det)) / (2.0 * a) * D;
vec3 P2 = O + (-b + sqrt(det)) / (2.0 * a) * D;
float isect1 = step( 0.0, dot(normalize(P1 - C), V) );
float isect2 = step( 0.0, dot(normalize(P2 - C), V) );
if ( isect1 < 0.5 )
{
P1 = P2;
isect1 = isect2;
}
if ( isect2 < 0.5 )
{
P2 = P1;
isect2 = isect1;
}
isectP = (length(P1 - O) < length(P2 - O)) ? P1 : P2;
isIsect = mix( isect2, 1.0, isect1 );
if ( length(pos) != 0.0 && length(isectP - O) > length(pos - O))
isIsect = 0.0;
}
float dist = length( isectP - C.xyz );
float limit = degrees(acos(dot(-s, normalize(lightsArray[i].direction))) );
if (isIsect > 0.0 || limit <= lightsArray[i].cutOffAngle)
{
att = 1.0 / dot( vec3( 1.0, dist, dist * dist ),
vec3(lightsArray[i].constantAttenuation,
lightsArray[i].linearAttenuation,
lightsArray[i].quadraticAttenuation) );
}
else
att = 0.0;
}
}
float diffuse = max( dot( s, norm ), 0.0 );
lightColor += att * lightsArray[i].intensity * diffuse * lightsArray[i].color;
}
fragColor = vec4(col.rgb * lightColor, col.a);
}
Uniforms passed to the shader are:
qml: lightsArray[0].type = 0
qml: lightsArray[0].position = QVector3D(0, 10, 0)
qml: lightsArray[0].color = #ffffff
qml: lightsArray[0].intensity = 0.8
qml: lightsArray[0].constantAttenuation = 1
qml: lightsArray[0].linearAttenuation = 0
qml: lightsArray[0].quadraticAttenuation = 0
qml: lightsArray[1].type = 2
qml: lightsArray[1].position = QVector3D(0, 3, 0)
qml: lightsArray[1].color = #008000
qml: lightsArray[1].intensity = 0.5
qml: lightsArray[1].constantAttenuation = 2
qml: lightsArray[1].linearAttenuation = 0
qml: lightsArray[1].quadraticAttenuation = 0
qml: lightsArray[1].direction = QVector3D(-0.573576, -0.819152, 0)
qml: lightsArray[1].cutOffAngle = 15
qml: lightsNumber = 2
Screenshot:
For a primitive visualization of the light cone of a spot light, you have to intersect the line of sight with the light cone. The following algorithm works in a perspective view, and the calculations are done in view (eye) space. The algorithm does not care about the geometry of the scene and does not do any depth or shadow test; it is only an overlaid visualization of the light cone.
The line of sight in a perspective view can be defined by a point and a direction. Since the calculation is done in view (eye) space, the point is the point of view (the origin of the view frustum), which is vec3(0.0). The direction can easily be determined by intersecting the line of sight with the near plane of the camera frustum. This can be calculated if the projected XY coordinate of the fragment is known in normalized device coordinates (the lower left corner is (-1,-1) and the upper right corner is (1,1); see the answer to this question).
float aspect = .....; // aspect ratio of the viewport (width/height)
float fov = .....; // field of view angle in radians (angle of the camera frustum on the Y-axis)
vec2 ndcPos = .....; // fragment position in NDC space from (-1,-1) to (1,1)
float tanFov = tan( fov * 0.5 );
vec3 los = normalize( vec3( ndcPos.x * aspect * tanFov, ndcPos.y * tanFov, -1.0 ) );
The light cone is defined by the origin of the light source, the direction in which the light source points, and the full angle of the light cone. The position and the direction have to be set up in view space. The angle has to be given in radians.
vec3 vLightPos = .....; // position of the light source in view space
vec3 vLightDir = .....; // direction of the light in view space
float coneAngle = .....; // full angle of the light cone in radians
How to calculate the intersection point(s) of a ray and a cone can be found in the answer to the Stack Overflow question Points of intersection of vector with cone and in the following paper: Intersection of a ray and a cone.
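As a compact restatement of that derivation (my summary, using the same names as in the snippet that follows): a point P lies on the infinite cone with apex C, unit axis V and half-angle theta exactly when
\[ \big((P - C)\cdot V\big)^2 \;=\; \lVert P - C\rVert^2 \cos^2\theta . \]
Substituting the ray P = O + t D (with D normalized and CO = O - C) gives the quadratic \(a t^2 + b t + c = 0\) with
\[ a = (D\cdot V)^2 - \cos^2\theta, \qquad b = 2\big((D\cdot V)(CO\cdot V) - (D\cdot CO)\cos^2\theta\big), \qquad c = (CO\cdot V)^2 - (CO\cdot CO)\cos^2\theta , \]
which are exactly the coefficients a, b and c computed in the code. The later test dot(normalize(P - C), V) >= 0 discards solutions that lie on the mirror half of the double cone behind the apex.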
The following code calculates the intersection of a ray and a cone as defined above. The resulting point is stored in isectP. The float variable isIsect is set to 1.0 if there is an intersection and to 0.0 otherwise.
// ray definition
vec3 O = vec3(0.0);
vec3 D = los;
// cone definition
vec3 C = vLightPos;
vec3 V = vLightDir;
float cosTh = cos( coneAngle * 0.5 );
// ray - cone intersection
vec3 CO = O - C;
float DdotV = dot( D, V );
float COdotV = dot( CO, V );
float a = DdotV*DdotV - cosTh*cosTh;
float b = 2.0 * (DdotV*COdotV - dot( D, CO )*cosTh*cosTh);
float c = COdotV*COdotV - dot( CO, CO )*cosTh*cosTh;
float det = b*b - 4.0*a*c;
// find intersection
float isIsect = 0.0;
vec3 isectP = vec3(0.0);
if ( det >= 0.0 )
{
vec3 P1 = O + (-b-sqrt(det))/(2.0*a) * D;
vec3 P2 = O + (-b+sqrt(det))/(2.0*a) * D;
float isect1 = step( 0.0, dot(normalize(P1-C), V) );
float isect2 = step( 0.0, dot(normalize(P2-C), V) );
P1 = mix( P2, P1, isect1 );
isectP = P2.z < 0.0 && P2.z > P1.z ? P2 : P1;
isIsect = mix( isect2, 1.0, isect1 ) * step( isectP.z, 0.0 );
}
For the full GLSL code, see the following WebGL example:
(function loadscene() {
var sliderScale = 100.0
var gl, canvas, vp_size, camera, progDraw, progLightCone, bufTorus = {}, bufQuad = {}, drawFB;
function render(deltaMS) {
var ambient = document.getElementById( "ambient" ).value / sliderScale;
var diffuse = document.getElementById( "diffuse" ).value / sliderScale;
var specular = document.getElementById( "specular" ).value / sliderScale;
var shininess = document.getElementById( "shininess" ).value;
var cutOffAngle = document.getElementById( "cutOffAngle" ).value;
// setup view projection and model
vp_size = [canvas.width, canvas.height];
var prjMat = camera.Perspective();
var viewMat = camera.LookAt();
var modelMat = IdentM44();
modelMat = RotateAxis( modelMat, CalcAng( deltaMS, 13.0 ), 0 );
modelMat = RotateAxis( modelMat, CalcAng( deltaMS, 17.0 ), 1 );
var lightPos = [0.95, 0.95, -1.0];
var lightDir = [-1.0, -1.0, -3.0];
var lightCutOffAngleRad = cutOffAngle * Math.PI / 180.0;
var lightAtt = [0.7, 0.1, 0.5];
drawFB.Bind( true );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShProg.Use( progDraw );
ShProg.SetM44( progDraw, "u_projectionMat44", prjMat );
ShProg.SetM44( progDraw, "u_viewMat44", viewMat );
ShProg.SetF3( progDraw, "u_light.position", lightPos );
ShProg.SetF3( progDraw, "u_light.direction", lightDir );
ShProg.SetF1( progDraw, "u_light.ambient", ambient );
ShProg.SetF1( progDraw, "u_light.diffuse", diffuse );
ShProg.SetF1( progDraw, "u_light.specular", specular );
ShProg.SetF1( progDraw, "u_light.shininess", shininess );
ShProg.SetF3( progDraw, "u_light.attenuation", lightAtt );
ShProg.SetF1( progDraw, "u_light.cutOffAngle", lightCutOffAngleRad );
ShProg.SetM44( progDraw, "u_modelMat44", modelMat );
bufObj = bufTorus;
gl.enableVertexAttribArray( progDraw.inPos );
gl.enableVertexAttribArray( progDraw.inNV );
gl.enableVertexAttribArray( progDraw.inCol );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 3, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.nv );
gl.vertexAttribPointer( progDraw.inNV, 3, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.col );
gl.vertexAttribPointer( progDraw.inCol, 3, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.drawElements( gl.TRIANGLES, bufObj.inxLen, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progDraw.inPos );
gl.disableVertexAttribArray( progDraw.inNV );
gl.disableVertexAttribArray( progDraw.inCol );
drawFB.Release( true );
gl.viewport( 0, 0, canvas.width, canvas.height );
var texUnitDraw = 2;
drawFB.BindTexture( texUnitDraw );
ShProg.Use( progLightCone );
ShProg.SetI1( progLightCone, "u_colorAttachment0", texUnitDraw );
ShProg.SetF2( progLightCone, "u_depthRange", [ camera.near, camera.far ] );
ShProg.SetF2( progLightCone, "u_vp", camera.vp );
ShProg.SetF1( progLightCone, "u_fov", camera.fov_y * Math.PI / 180.0 );
ShProg.SetF3( progLightCone, "u_light.position", lightPos );
ShProg.SetF3( progLightCone, "u_light.direction", lightDir );
ShProg.SetF3( progLightCone, "u_light.attenuation", lightAtt );
ShProg.SetF1( progLightCone, "u_light.cutOffAngle", lightCutOffAngleRad );
gl.enableVertexAttribArray( progLightCone.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progLightCone.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, bufQuad.inxLen, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progLightCone.inPos );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "glow-canvas");
vp_size = [canvas.width, canvas.height];
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
document.getElementById( "ambient" ).value = 0.25 * sliderScale;
document.getElementById( "diffuse" ).value = 1.0 * sliderScale;
document.getElementById( "specular" ).value = 1.0 * sliderScale;
document.getElementById( "shininess" ).value = 10.0;
document.getElementById( "cutOffAngle" ).value = 30.0;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( progDraw == 0 )
return;
progDraw.inPos = ShProg.AttrI( progDraw, "inPos" );
progDraw.inNV = ShProg.AttrI( progDraw, "inNV" );
progDraw.inCol = ShProg.AttrI( progDraw, "inCol" );
progLightCone = ShProg.Create(
[ { source : "light-cone-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "light-cone-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( progLightCone == 0 )
return;
progLightCone.inPos = ShProg.AttrI( progLightCone, "inPos" );
var circum_size = 32, tube_size = 32;
var rad_circum = 1.5;
var rad_tube = 0.8;
var torus_pts = [];
var torus_nv = [];
var torus_col = [];
var torus_inx = [];
var col = [1, 0.5, 0.0];
for ( var i_c = 0; i_c < circum_size; ++ i_c ) {
var center = [
Math.cos(2 * Math.PI * i_c / circum_size),
Math.sin(2 * Math.PI * i_c / circum_size) ]
for ( var i_t = 0; i_t < tube_size; ++ i_t ) {
var tubeX = Math.cos(2 * Math.PI * i_t / tube_size)
var tubeY = Math.sin(2 * Math.PI * i_t / tube_size)
var pt = [
center[0] * ( rad_circum + tubeX * rad_tube ),
center[1] * ( rad_circum + tubeX * rad_tube ),
tubeY * rad_tube ]
var nv = [ pt[0] - center[0] * rad_tube, pt[1] - center[1] * rad_tube, tubeY * rad_tube ]
torus_pts.push( pt[0], pt[1], pt[2] );
torus_nv.push( nv[0], nv[1], nv[2] );
torus_col.push( col[0], col[1], col[2] );
var i_cn = (i_c+1) % circum_size
var i_tn = (i_t+1) % tube_size
var i_c0 = i_c * tube_size;
var i_c1 = i_cn * tube_size;
torus_inx.push( i_c0+i_t, i_c0+i_tn, i_c1+i_t, i_c0+i_tn, i_c1+i_t, i_c1+i_tn )
}
}
bufTorus.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufTorus.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( torus_pts ), gl.STATIC_DRAW );
bufTorus.nv = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufTorus.nv );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( torus_nv ), gl.STATIC_DRAW );
bufTorus.col = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufTorus.col );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( torus_col ), gl.STATIC_DRAW );
bufTorus.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufTorus.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( torus_inx ), gl.STATIC_DRAW );
bufTorus.inxLen = torus_inx.length;
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
bufQuad.inxLen = 6;
camera = new Camera( [0, 4, 0.0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // nearest power of 2
var fb_rect = [fbsize, fbsize];
drawFB = FrameBuffer.Create( fb_rect );
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaMS, intervall ) {
return Fract( deltaMS / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaMS, intervall, range ) {
var pos = Fract( deltaMS / (1000*intervall) ) * 2.0;
pos = pos < 1.0 ? pos : (2.0 - pos);
return range[0] + (range[1] - range[0]) * pos;
}
function IdentM44() { return [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]; }
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.orbit_mat = this.current_orbit_mat = this.model_mat = this.current_model_mat = IdentM44();
this.mouse_drag = this.auto_spin = false;
this.auto_rotate = true;
this.mouse_start = [0, 0];
this.mouse_drag_axis = [0, 0, 0];
this.mouse_drag_angle = 0;
this.mouse_drag_time = 0;
this.drag_start_T = this.rotate_start_T = this.Time();
this.Ortho = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var w = this.vp[0];
var h = this.vp[1];
return [
2/w, 0, 0, 0,
0, 2/h, 0, 0,
0, 0, -2/f_n, 0,
0, 0, -fn/f_n, 1 ];
};
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
}
var FrameBuffer = {};
FrameBuffer.Create = function( vp, texturePlan ) {
var texPlan = texturePlan ? new Uint8Array( texturePlan ) : null;
var fb = gl.createFramebuffer();
fb.width = vp[0];
fb.height = vp[1];
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, texPlan );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
fb.Bind = function( clear ) {
gl.bindFramebuffer( gl.FRAMEBUFFER, this );
if ( clear ) {
gl.viewport( 0, 0, this.width, this.height );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
}
};
fb.Release = function( clear ) {
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
if ( clear ) {
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
}
};
fb.BindTexture = function( textureUnit ) {
gl.activeTexture( gl.TEXTURE0 + textureUnit );
gl.bindTexture( gl.TEXTURE_2D, this.color0_texture );
};
return fb;
}
var ShProg = {};
ShProg.Create = function( shaderList ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.Compile( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.Link( shaderObjs )
if ( progObj != 0 ) {
progObj.attrInx = {};
var noOfAttributes = gl.getProgramParameter( progObj, gl.ACTIVE_ATTRIBUTES );
for ( var i_n = 0; i_n < noOfAttributes; ++ i_n ) {
var name = gl.getActiveAttrib( progObj, i_n ).name;
progObj.attrInx[name] = gl.getAttribLocation( progObj, name );
}
progObj.uniLoc = {};
var noOfUniforms = gl.getProgramParameter( progObj, gl.ACTIVE_UNIFORMS );
for ( var i_n = 0; i_n < noOfUniforms; ++ i_n ) {
var name = gl.getActiveUniform( progObj, i_n ).name;
progObj.uniLoc[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShProg.AttrI = function( progObj, name ) { return progObj.attrInx[name]; }
ShProg.UniformL = function( progObj, name ) { return progObj.uniLoc[name]; }
ShProg.Use = function( progObj ) { gl.useProgram( progObj ); }
ShProg.SetI1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1i( progObj.uniLoc[name], val ); }
ShProg.SetF1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1f( progObj.uniLoc[name], val ); }
ShProg.SetF2 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform2fv( progObj.uniLoc[name], arr ); }
ShProg.SetF3 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform3fv( progObj.uniLoc[name], arr ); }
ShProg.SetF4 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform4fv( progObj.uniLoc[name], arr ); }
ShProg.SetM44 = function( progObj, name, mat ) { if(progObj.uniLoc[name]) gl.uniformMatrix4fv( progObj.uniLoc[name], false, mat ); }
ShProg.Compile = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript) {
source = "";
var node = shaderScript.firstChild;
while (node) {
if (node.nodeType == 3) source += node.textContent;
node = node.nextSibling;
}
}
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShProg.Link = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
var status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
varying vec4 clip_space_pos;
uniform mat4 u_projectionMat44;
uniform mat4 u_viewMat44;
uniform mat4 u_modelMat44;
void main()
{
vec3 modelNV = mat3( u_modelMat44 ) * normalize( inNV );
vertNV = mat3( u_viewMat44 ) * modelNV;
vertCol = inCol;
vec4 modelPos = u_modelMat44 * vec4( inPos, 1.0 );
vec4 viewPos = u_viewMat44 * modelPos;
vertPos = viewPos.xyz / viewPos.w;
gl_Position = u_projectionMat44 * viewPos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
struct Light {
vec3 position;
vec3 direction;
float ambient;
float diffuse;
float specular;
float shininess;
vec3 attenuation;
float cutOffAngle;
};
uniform Light u_light;
void main()
{
vec3 color = vertCol;
vec3 lightCol = u_light.ambient * color;
vec3 normalV = normalize( vertNV );
vec3 lightV = normalize( u_light.position - vertPos );
float lightD = length( u_light.position - vertPos );
float cosL = dot( normalize( u_light.direction ), -lightV );
float inCone = step( cos( u_light.cutOffAngle * 0.5 ), cosL );
float att = 1.0 / dot( vec3( 1.0, lightD, lightD*lightD ), u_light.attenuation );
float NdotL = max( 0.0, dot( normalV, lightV ) );
lightCol += NdotL * u_light.diffuse * color * inCone * att;
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + lightV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float kSpecular = ( u_light.shininess + 2.0 ) * pow( NdotH, u_light.shininess ) / ( 2.0 * 3.14159265 );
lightCol += kSpecular * u_light.specular * color * inCone * att;
gl_FragColor = vec4( lightCol.rgb, 1.0 );
}
</script>
<script id="light-cone-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 vertPos;
void main()
{
vertPos.xy = inPos.xy;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="light-cone-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vertPos;
uniform sampler2D u_colorAttachment0;
uniform vec2 u_depthRange;
uniform vec2 u_vp;
uniform float u_fov;
struct Light {
vec3 position;
vec3 direction;
float ambient;
float diffuse;
float specular;
float shininess;
vec3 attenuation;
float cutOffAngle;
};
uniform Light u_light;
void main()
{
vec4 texCol = texture2D( u_colorAttachment0, vertPos.st * 0.5 + 0.5 );
vec3 vLightPos = u_light.position;
vec3 vLightDir = normalize( u_light.direction );
float tanFOV = tan(u_fov*0.5);
vec3 nearPos = vec3( vertPos.x * u_vp.x/u_vp.y * tanFOV, vertPos.y * tanFOV, -1.0 );
//vec2 texCoord = gl_FragCoord.xy / u_vp;
//vec3 nearPos = vec3( (texCoord.x-0.5) * u_vp.x/u_vp.y, texCoord.y-0.5, -u_depthRange.x );
vec3 los = normalize( nearPos );
// ray definition
vec3 O = vec3(0.0);
vec3 D = los;
// cone definition
vec3 C = vLightPos;
vec3 V = vLightDir;
float cosTh = cos( u_light.cutOffAngle * 0.5 );
// ray - cone intersection
vec3 CO = O - C;
float DdotV = dot( D, V );
float COdotV = dot( CO, V );
float a = DdotV*DdotV - cosTh*cosTh;
float b = 2.0 * (DdotV*COdotV - dot( D, CO )*cosTh*cosTh);
float c = COdotV*COdotV - dot( CO, CO )*cosTh*cosTh;
float det = b*b - 4.0*a*c;
// find intersection
float isIsect = 0.0;
vec3 isectP = vec3(0.0);
if ( det >= 0.0 )
{
vec3 P1 = O + (-b-sqrt(det))/(2.0*a) * D;
vec3 P2 = O + (-b+sqrt(det))/(2.0*a) * D;
float isect1 = step( 0.0, dot(normalize(P1-C), V) );
float isect2 = step( 0.0, dot(normalize(P2-C), V) );
if ( isect1 < 0.5 )
{
P1 = P2;
isect1 = isect2;
}
if ( isect2 < 0.5 )
{
P2 = P1;
isect2 = isect1;
}
isectP = ( P1.z > -u_depthRange.x || (P2.z < -u_depthRange.x && P1.z < P2.z ) ) ? P2 : P1;
isIsect = mix( isect2, 1.0, isect1 ) * step( isectP.z, -u_depthRange.x );
}
float dist = length( isectP - vLightPos.xyz );
float att = 1.0 / dot( vec3( 1.0, dist, dist*dist ), u_light.attenuation );
gl_FragColor = vec4( mix( texCol.rgb, vec3(1.0, 1.0, 1.0), isIsect * att * 0.5 ), 1.0 );
}
</script>
<div><form id="gui" name="inputs">
<table>
<tr> <td> <font color=#40f040>ambient</font> </td>
<td> <input type="range" id="ambient" min="0" max="100" value="0"/></td> </tr>
<tr> <td> <font color=#40f040>diffuse</font> </td>
<td> <input type="range" id="diffuse" min="0" max="100" value="0"/></td> </tr>
<tr> <td> <font color=#40f040>specular</font> </td>
<td> <input type="range" id="specular" min="0" max="100" value="0"/></td> </tr>
<tr> <td> <font color=#40f040>shininess</font> </td>
<td> <input type="range" id="shininess" min="1" max="100" value="0"/></td> </tr>
<tr> <td> <font color=#40f040>cut off angle</font> </td>
<td> <input type="range" id="cutOffAngle" min="1" max="180" value="0"/></td> </tr>
</table>
</form>
</div>
<canvas id="glow-canvas" style="border: none;"></canvas>

Why isn't this Shadertoy displaying correctly in GLSL?

I'm working on porting this shader from Shadertoy into GLSL for a GPUImage-based iOS platform.
https://www.shadertoy.com/view/4s2yW1
Technically, it runs on the device. However, it only displays the background and not the circles, which are the whole point.
I'm wondering if someone can give me a clue as to why this isn't showing up properly. For whatever reason I'm getting the same results when using ShaderFrog.
Here's the ShaderFrog link:
http://shaderfrog.com/app/view/1463
and the code itself:
#define PI 3.14159265359
NSString *const kGPUImageBokehFragmentShaderString = SHADER_STRING
(
precision highp float;
varying highp vec2 textureCoordinate;
uniform sampler2D inputImageTexture;
uniform highp float time;
void Rotate( vec2 p, float a )
{
p = cos( a ) * p + sin( a ) * vec2( p.y, -p.x );
}
float Circle( vec2 p, float r )
{
return ( length( p / r ) - 1.0 ) * r;
}
float Rand( vec2 c )
{
return fract( sin( dot( c.xy, vec2( 12.9898, 78.233 ) ) ) * 43758.5453 );
}
float saturate( float x )
{
return clamp( x, 0.0, 1.0 );
}
void BokehLayer(vec3 color, vec2 p, vec3 c )
{
float wrap = 450.0;
if ( mod( floor( p.y / wrap + 0.5 ), 2.0 ) == 0.0 )
{
p.x += wrap * 0.5;
}
vec2 p2 = mod( p + 0.5 * wrap, wrap ) - 0.5 * wrap;
vec2 cell = floor( p / wrap + 0.5 );
float cellR = Rand( cell );
c *= fract( cellR * 3.33 + 3.33 );
float radius = mix( 30.0, 70.0, fract( cellR * 7.77 + 7.77 ) );
p2.x *= mix( 0.9, 1.1, fract( cellR * 11.13 + 11.13 ) );
p2.y *= mix( 0.9, 1.1, fract( cellR * 17.17 + 17.17 ) );
float sdf = Circle( p2, radius );
float circle = 1.0 - smoothstep( 0.0, 1.0, sdf * 0.04 );
float glow = exp( -sdf * 0.025 ) * 0.3 * ( 1.0 - circle );
color += c * ( circle + glow );
}
void main()
{
vec2 iResolution = vec2(1., 1.);
vec2 uv = textureCoordinate.xy/iResolution.xy;
vec2 p = ( 2.0 * textureCoordinate - iResolution.xy) / iResolution.x * 1000.0;
// background
vec3 color = mix( vec3( 0.3, 0.1, 0.3 ), vec3( 0.1, 0.4, 0.5 ), dot( uv, vec2( 0.2, 0.7 ) ) );
float timeElapsed = time - 15.0;
Rotate( p, 0.2 + timeElapsed * 0.03 );
BokehLayer( color, p + vec2( -50.0 * timeElapsed + 0.0, 0.0 ), 3.0 * vec3( 0.4, 0.1, 0.2 ) );
Rotate( p, 0.3 - timeElapsed * 0.05 );
BokehLayer( color, p + vec2( -70.0 * timeElapsed + 33.0, -33.0 ), 3.5 * vec3( 0.6, 0.4, 0.2 ) );
Rotate( p, 0.5 + timeElapsed * 0.07 );
BokehLayer( color, p + vec2( -60.0 * timeElapsed + 55.0, 55.0 ), 3.0 * vec3( 0.4, 0.3, 0.2 ) );
Rotate( p, 0.9 - timeElapsed * 0.03 );
BokehLayer( color, p + vec2( -25.0 * timeElapsed + 77.0, 77.0 ), 3.0 * vec3( 0.4, 0.2, 0.1 ) );
Rotate( p, 0.0 + timeElapsed * 0.05 );
BokehLayer( color, p + vec2( -15.0 * timeElapsed + 99.0, 99.0 ), 3.0 * vec3( 0.2, 0.0, 0.4 ) );
vec4 bokehColor = vec4( color, 1.0 );
gl_FragColor = bokehColor;
} );
Anything you can tell me about this would be greatly appreciated. Thanks!
The problem is that you don't have color marked as inout:
void BokehLayer(vec3 color, vec2 p, vec3 c ) // bad
void BokehLayer(inout vec3 color, vec2 p, vec3 c ) // good
BokehLayer wants to modify color, but without the inout qualifier the parameter is just a local copy of the argument. With inout it is a reference to the original variable, so the function's writes are visible to the caller.
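As a minimal self-contained sketch of the difference (my own illustration, not code from the question or from GPUImage), the following fragment shader shows that a write to a plain value parameter is lost, while a write to an inout parameter is copied back to the caller:
precision mediump float;
// Plain parameter: the function works on a copy, so the caller never sees the write.
void addByValue( vec3 color, vec3 c )
{
    color += c; // modifies only the local copy
}
// inout parameter: the value is copied in and copied back out when the function returns.
void addInOut( inout vec3 color, vec3 c )
{
    color += c; // visible to the caller after the call
}
void main()
{
    vec3 color = vec3( 0.1 );
    addByValue( color, vec3( 0.5 ) ); // color is still vec3( 0.1 )
    addInOut( color, vec3( 0.5 ) );   // color is now vec3( 0.6 )
    gl_FragColor = vec4( color, 1.0 );
}
For the same reason, the Rotate helper in the ported code never changes p as written; that is not what hides the circles, but it probably deserves the same inout treatment.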