I would like to fix the distortion of this image :
Sorry for the quality, but this is the best example I could find.
I don't know if fixing this distortion is possible (I would like to have straight doors and a straight ceiling), but basically, instead of pushing the pixels outside of the image (red arrows), which adds a blur effect, I would like to do the opposite (green arrows): pull the pixels towards the center.
If you have any idea, that would be awesome. Other solutions are welcome as well!
For the definition of the fisheye effect, you have to associate an angle (alpha) with the diagonal (d) of the viewport.
The diagonal of the viewport is the diameter of the circle which encloses the entire viewport.
The relation between the radius of the perimeter circle (r) and the angle (alpha) is:
r = tan(alpha / 2)
In the following eye_angle corresponds to alpha and half_dist to r:
float half_angle = eye_angle/2.0;
float half_dist = tan(half_angle);
With the aspect ratio of the viewport (aspect) and the normalized device position of the fragment on the viewport (ndcPos), the position P can be calculated. In normalized device space, x and y are in the range [-1, 1]:
vec2 vp_scale = vec2(aspect, 1.0);
vec2 P = ndcPos * vp_scale;
For each point (P) on the viewport, the relative distance (rel_dist) to the center of the viewport in relation to the perimeter circle has to be calculated, as well as the relative position (rel_P) of the point in relation to the aspect ratio:
float vp_dia = length(vp_scale);
float rel_dist = length(P) / vp_dia;
vec2 rel_P = normalize(P) / normalize(vp_scale);
The fisheye effect is caused by the projection of a spherical surface onto a plane. To convert to and from this projection, the relation between the arc length and the distance to the center of the plane has to be found:
If the radius of a circle is 1, the length of an arc is equal to the angle of the arc segment in radians. So the relation between the distance to the point P and the angle beta is:
|P|/r = tan(beta)
beta = atan(|P|/r)
The projection from the spherical surface to the plane is then:
float beta = rel_dist * half_angle;
vec3 pos_prj = rel_P * tan(beta) / half_dist;
And the projection from the plane to the spherical surface is:
float beta = atan(rel_dist * half_dist);
vec2 pos_prj = rel_P * beta / half_angle;
See the following WebGL example, which uses a fragment shader in which the algorithm is implemented. The angle alpha is set by the uniform variable u_angle.
If u_angle > 0.0, the projection from the spherical surface to the plane is calculated.
If u_angle < 0.0, the projection from the plane to the spherical surface is calculated.
(function loadscene() {
var canvas, gl, vp_size, texture, progDraw, bufObj = {};
function initScene() {
canvas = document.getElementById( "ogl-canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
texture = new Texture( "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/supermario.jpg" );
texture.bound = false;
progDraw = gl.createProgram();
for (let i = 0; i < 2; ++i) {
let source = document.getElementById(i==0 ? "draw-shader-vs" : "draw-shader-fs").text;
let shaderObj = gl.createShader(i==0 ? gl.VERTEX_SHADER : gl.FRAGMENT_SHADER);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
let status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
gl.attachShader(progDraw, shaderObj);
}
gl.linkProgram(progDraw);
var status = gl.getProgramParameter(progDraw, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(progDraw));
progDraw.inPos = gl.getAttribLocation(progDraw, "inPos");
progDraw.u_time = gl.getUniformLocation(progDraw, "u_time");
progDraw.u_resolution = gl.getUniformLocation(progDraw, "u_resolution");
progDraw.u_texture = gl.getUniformLocation(progDraw, "u_texture");
progDraw.u_angle = gl.getUniformLocation(progDraw, "u_angle");
gl.useProgram(progDraw);
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
gl.enableVertexAttribArray( progDraw.inPos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
vp_size = [window.innerWidth, window.innerHeight];
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
function render(deltaMS) {
var scale = document.getElementById( "scale" ).value / 100 * 2.0 - 1.0;
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texture.bound = texture.bound || texture.bind( 0 );
gl.uniform1i(progDraw.u_texture, 0);
gl.uniform1f(progDraw.u_time, deltaMS/2000.0);
gl.uniform2f(progDraw.u_resolution, canvas.width, canvas.height);
gl.uniform1f(progDraw.u_angle, scale * Math.PI * 0.9);
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
requestAnimationFrame(render);
}
class Texture {
constructor( name, dflt ) {
let texture = this;
this.dflt = dflt || [128,128,128,255]
let image = { "cx": this.dflt.w || 1, "cy": this.dflt.h || 1, "plane": this.dflt.p || this.dflt };
this.size = [image.cx, image.cy];
this.dummyObj = Texture.createTexture2D( image, true )
this.image = new Image(64,64);
this.image.setAttribute('crossorigin', 'anonymous');
this.image.onload = function () {
let cx = 1 << 31 - Math.clz32(texture.image.naturalWidth);
if ( cx < texture.image.naturalWidth ) cx *= 2;
let cy = 1 << 31 - Math.clz32(texture.image.naturalHeight);
if ( cy < texture.image.naturalHeight ) cy *= 2;
var canvas = document.createElement( 'canvas' );
canvas.width = cx;
canvas.height = cy;
var context = canvas.getContext( '2d' );
context.drawImage( texture.image, 0, 0, canvas.width, canvas.height );
texture.textureObj = Texture.createTexture2D( canvas, true );
texture.size = [cx, cy];
}
this.image.src = name;
}
static createTexture2D( image, flipY ) {
let t = gl.createTexture();
gl.activeTexture( gl.TEXTURE0 );
gl.bindTexture( gl.TEXTURE_2D, t );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
if ( image.cx && image.cy && image.plane )
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, image.cx, image.cy, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(image.plane) );
else
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
gl.bindTexture( gl.TEXTURE_2D, null );
return t;
}
bind( texUnit = 0 ) {
gl.activeTexture( gl.TEXTURE0 + texUnit );
if ( this.textureObj ) {
gl.bindTexture( gl.TEXTURE_2D, this.textureObj );
return true;
}
gl.bindTexture( gl.TEXTURE_2D, this.dummyObj );
return false;
}
};
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform float u_time;
uniform vec2 u_resolution;
uniform float u_angle;
uniform sampler2D u_texture;
void main()
{
vec2 uv = gl_FragCoord.xy / u_resolution;
vec2 ndcPos = uv * 2.0 - 1.0;
float aspect = u_resolution.x / u_resolution.y;
float eye_angle = abs(u_angle);
float half_angle = eye_angle/2.0;
float half_dist = tan(half_angle);
vec2 vp_scale = vec2(aspect, 1.0);
vec2 P = ndcPos * vp_scale;
float vp_dia = length(vp_scale);
float rel_dist = length(P) / vp_dia;
vec2 rel_P = normalize(P) / normalize(vp_scale);
vec2 pos_prj = ndcPos;
if (u_angle > 0.0)
{
float beta = rel_dist * half_angle;
pos_prj = rel_P * tan(beta) / half_dist;
}
else if (u_angle < 0.0)
{
float beta = atan(rel_dist * half_dist);
pos_prj = rel_P * beta / half_angle;
}
vec2 uv_prj = pos_prj * 0.5 + 0.5;
vec2 rangeCheck = step(vec2(0.0), uv_prj) * step(uv_prj, vec2(1.0));
if (rangeCheck.x * rangeCheck.y < 0.5)
discard;
vec4 texColor = texture2D(u_texture, uv_prj.st);
gl_FragColor = vec4( texColor.rgb, 1.0 );
}
</script>
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
void main()
{
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<canvas id="ogl-canvas" style="border: none"></canvas>
<form id="gui" name="inputs">
<input type="range" id="scale" min="0" max="100" value="20"/>
</form>
This kind of distortion is a property of a camera. If you have access to the camera used to take the picture, you can calibrate it in order to extract the deformation it applies. Otherwise, you can still use some general equations. Have a look at Paul Bourke's article, this Stack Overflow answer or this Shader Toy example, to see how it could be done.
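For instance, a minimal CPU-side sketch of such a general remapping, assuming an equidistant fisheye model with a known aperture angle alpha (the function name and parameters are made up for illustration; in practice alpha has to be estimated or obtained from a calibration):
#include <cmath>

// For each pixel of the corrected (rectilinear) output image, compute the
// position in the fisheye source image that has to be sampled.
// Coordinates are given relative to the image center; alpha is the assumed
// aperture angle of the lens over the image diagonal, in radians.
void fisheyeSourcePos( double xDst, double yDst,     // destination pixel (center origin)
                       double width, double height,  // image size in pixels
                       double alpha,
                       double &xSrc, double &ySrc )  // source pixel to sample (center origin)
{
    double halfAngle = alpha / 2.0;
    double halfDiag  = 0.5 * std::sqrt( width*width + height*height );
    double halfDist  = std::tan( halfAngle );        // radius of the perimeter circle

    double rDst = std::sqrt( xDst*xDst + yDst*yDst ) / halfDiag; // relative distance to the center
    if ( rDst <= 0.0 ) { xSrc = xDst; ySrc = yDst; return; }

    // view angle of the destination point, and the relative radius at which
    // an equidistant fisheye lens images that angle
    double beta = std::atan( rDst * halfDist );
    double rSrc = beta / halfAngle;

    xSrc = xDst * rSrc / rDst;
    ySrc = yDst * rSrc / rDst;
}
Sampling the source at (xSrc, ySrc) for every destination pixel (with bilinear interpolation) pulls the content towards the center, which is the effect asked for above.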
I recently started learning OpenGL ES 2.0, and now I am trying to apply a Gaussian blur to triangles I generate myself. I have some difficulty understanding the examples on the web, since most of them apply the blur to an image. I know I have to use a framebuffer, but I don't know how to draw a triangle into it and then apply the blur.
Is it possible to see a real and complete example in C++ with a good explanation?
EDIT :
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
#include "shaders.hpp"
#include "camera.hpp"
unsigned int vbo, cbo, tbo;
GLuint _fbo, _fbo2, _tex, _tex2;
static const GLuint WIDTH = 800;
static const GLuint HEIGHT = 600;
GLuint pos, col, tex, normal;
camera * _camera = new camera();
static const GLfloat vertices[] = {
0.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
-1.0f, -1.0f, 0.0f
};
static const GLfloat colors[] = {
0.0f, 0.5f, 1.0f,
0.5f, 0.5f, 1.0f,
0.5f, 0.5f, 1.0f
};
static const GLfloat texture[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f
};
int main(void){
GLFWwindow* window;
shaders * shaderBasic;
GLuint pId;
glm::mat4 projection; static glm::mat4 view; static glm::mat4 model;
glfwInit();
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
window = glfwCreateWindow(WIDTH, HEIGHT, __FILE__, NULL, NULL);
glfwMakeContextCurrent(window);
printf("GL_VERSION : %s\n", glGetString(GL_VERSION) );
printf("GL_RENDERER : %s\n", glGetString(GL_RENDERER) );
std::string vs, fs;
vs = "basic.vs";
fs = "basic.fs";
shaderBasic = new shaders(vs, fs);
shaderBasic->CompileShader();
shaderBasic->LinkShader();
pId = shaderBasic->getProgramId();
pos = glGetAttribLocation(pId, "position");
col = glGetAttribLocation(pId, "colors");
tex = glGetAttribLocation(pId, "tex");
fs = "lastBlur.fs";
shaders * blurShader;
GLuint pIdBlur;
blurShader = new shaders(vs, fs);
blurShader->CompileShader();
blurShader->LinkShader();
pIdBlur = blurShader->getProgramId();
_camera->setPositionCamera(glm::vec3(0, 0, -1));
_camera->setLookAtCamera(glm::vec3(0, 0, 0));
_camera->setFieldOfView(45);
_camera->setAspect(WIDTH, HEIGHT);
_camera->setViewport(WIDTH, HEIGHT);
_camera->getMatricies(projection, view, model);
glGenFramebuffers(1, &_fbo);
glGenTextures(1, &_tex);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
glBindTexture(GL_TEXTURE_2D, _tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, WIDTH/2, HEIGHT/2, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _tex, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
std::cout << "ERROR::FRAMEBUFFER:: Framebuffer is not complete!" << std::endl;
else{
std::cout << "FRAMEBUFFER COMPLETE" << std::endl;
}
auto sampTex = glGetUniformLocation(pIdBlur, "texture0");
std::cerr << "sampTex : " << sampTex << std::endl;
glUniform1i(sampTex, 0);
while (!glfwWindowShouldClose(window)) {
// glViewport(0, 0, WIDTH, HEIGHT);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
glClearColor(0.0f, 0.0f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// glViewport(0, 0, WIDTH/2, HEIGHT/2);
glUseProgram(pIdBlur);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(pos, 3, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(pos);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &cbo);
glBindBuffer(GL_ARRAY_BUFFER, cbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);
glVertexAttribPointer(col, 2, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(col);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &tbo);
glBindBuffer(GL_ARRAY_BUFFER, tbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(texture), texture, GL_STATIC_DRAW);
glVertexAttribPointer(tex, 2, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(tex);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pId);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _tex);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwPollEvents();
glfwSwapBuffers(window);
}
glDeleteBuffers(1, &vbo);
glfwTerminate();
return EXIT_SUCCESS;
}
Blur Shader:
#version 100
precision mediump float;
uniform sampler2D texture0;
varying vec3 vColor;
varying vec2 TexCoords;
vec4 blur13(sampler2D image, vec2 uv, vec2 resolution, vec2 direction) {
vec4 color = vec4(0.0);
vec2 off1 = vec2(1.411764705882353) * direction;
vec2 off2 = vec2(3.2941176470588234) * direction;
vec2 off3 = vec2(5.176470588235294) * direction;
color += texture2D(image, uv) * 0.1964825501511404;
color += texture2D(image, uv + (off1 / resolution)) * 0.2969069646728344;
color += texture2D(image, uv - (off1 / resolution)) * 0.2969069646728344;
color += texture2D(image, uv + (off2 / resolution)) * 0.09447039785044732;
color += texture2D(image, uv - (off2 / resolution)) * 0.09447039785044732;
color += texture2D(image, uv + (off3 / resolution)) * 0.010381362401148057;
color += texture2D(image, uv - (off3 / resolution)) * 0.010381362401148057;
return color;
}
void main(){
gl_FragColor = blur13(texture0, TexCoords, vec2(400, 300), vec2(1.0, 0.0));
}
I assume you have swapped pIdBlur and pId.
I'll give you instructions for a Gaussian blur shader with 2 passes. This is an approximation which first blurs along the X axis in the 1st pass and along the Y axis in the 2nd pass. This gives much better performance for strong blurring, because a separable blur needs only 2·N texture reads per fragment instead of N·N.
The blur shader uses a normal (Gaussian) distribution. The same shader program is used for both passes, with individual direction settings stored in the uniform vec2 u_dir. The strength of the blur effect can be varied with the uniform variable float u_sigma in the range [0.0, 1.0].
Blur Vertex shader
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
Blur Fragment shader
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14159 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
After the program has been linked the uniform locations and attribute indices can be read from:
GLint attrInxPos = glGetAttribLocation( pIdBlur, "inPos" );
GLint locTexture = glGetUniformLocation( pIdBlur, "u_texture" );
GLint locTexSize = glGetUniformLocation( pIdBlur, "u_textureSize" );
GLint locSigma = glGetUniformLocation( pIdBlur, "u_sigma" );
GLint locDir = glGetUniformLocation( pIdBlur, "u_dir" );
A vertex array object containing a quad, which will later be drawn over the whole viewport for a screen space blur pass, has to be created (note that in core OpenGL ES 2.0 vertex array objects need the OES_vertex_array_object extension):
GLuint screenVAO;
glGenVertexArrays( 1, &screenVAO );
glBindVertexArray( screenVAO );
GLuint quadBuf;
glGenBuffers( 1, &quadBuf );
glBindBuffer( GL_ARRAY_BUFFER, quadBuf );
GLfloat screenRect[] = { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f };
glBufferData( GL_ARRAY_BUFFER, 8 * sizeof( float ), screenRect, GL_STATIC_DRAW );
glEnableVertexAttribArray( attrInxPos );
glVertexAttribPointer( attrInxPos, 2, GL_FLOAT, GL_FALSE, 0, nullptr );
Two framebuffers, each with a texture attached to its color plane, have to be created. The scene is drawn to the 1st one. The
2nd one is the target of the 1st blur pass. The 2nd blur pass draws directly to the default drawing buffer.
GLuint texObj[2];
GLuint fbObj[2];
glGenTextures(2, texObj);
glGenFramebuffers(2, fbObj);
glActiveTexture(GL_TEXTURE0);
for ( int i = 0; i < 2; i ++ )
{
glBindTexture(GL_TEXTURE_2D, texObj[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); // OpenGL ES 2.0 requires an unsized internal format that matches the format
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[i]);
glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texObj[i], 0 );
GLuint renderbuffer;
glGenRenderbuffers(1, &renderbuffer);
glBindRenderbuffer( GL_RENDERBUFFER, renderbuffer );
glRenderbufferStorage( GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height ); // GL_DEPTH_COMPONENT16 is the depth format available in core ES 2.0
glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, renderbuffer );
}
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Now everything that is needed for the blur passes has been generated.
To draw and blur the scene the following steps have to be applied.
First you have to bind and clear the 1st frame buffer
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[0]);
glClearColor(0.0f, 0.0f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
and use the shader program for drawing the objects:
glUseProgram(pId);
Now draw the object(s) of the scene.
.....
glDrawArrays(GL_TRIANGLES, 0, 3);
The second step is the 1st blur pass. The blur program has to be used and the 2nd framebuffer has to be bound.
After the 1st framebuffer has been released, you can use the texture that is attached to its color plane as an input for the blur shader. Note that a texture can't be source and destination at the same time; this would cause undefined behavior.
To bind the texture to the shader, you have to bind the texture to a texture unit and assign the index of the texture unit to the uniform sampler of the shader.
int texUnitIndex = 1;
GLfloat texSize[] = { (GLfloat)width, (GLfloat)height };
GLfloat dirX[] = { 1.0f, 0.0f };
GLfloat sigma = .....; // 0.0 <= sigma <= 1.0
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[1]);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pIdBlur);
glActiveTexture(GL_TEXTURE0 + texUnitIndex);
glBindTexture(GL_TEXTURE_2D, texObj[0]);
glUniform1i(locTexture, texUnitIndex);
glUniform2fv(locTexSize, 1, texSize);
glUniform2fv(locDir, 1, dirX);
glUniform1f(locSigma, sigma);
To apply the blur pass, a quad has to be drawn over the entire viewport area.
glBindVertexArray( screenVAO );
glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );
The 2nd and final blur pass is similar to the 1st one. The texture that was the target of the 1st blur pass is now the source texture, and the target is the default drawing buffer. The blur direction has to be set to the Y axis of the viewport.
GLfloat dirY[] = { 0.0f, 1.0f };
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texObj[1]);
glUniform2fv(locDir, 1, dirY);
// draw the screen space quad again to apply the final pass
glBindVertexArray( screenVAO );
glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );
See also the answers to the following question:
How to get a "Glow" shader effect in OpenGL ES 2.0?
What kind of blurs can be implemented in pixel shaders?
See also a similar WebGL example:
(function loadscene() {
var resize, gl, progDraw, progBlur, vp_size, blurFB;
var canvas, camera, bufCube = {}, bufQuad = {};
var shininess = 10.0, glow = 10.0, sigma = 0.8, radius = 1.0;
function render(deltaMS){
var sliderScale = 100;
sigma = document.getElementById( "sigma" ).value / sliderScale;
radius = document.getElementById( "radius" ).value / sliderScale;
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up framebuffer
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[0] );
gl.viewport( 0, 0, blurFB[0].width, blurFB[0].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// setup view projection and model
var prjMat = camera.Perspective();
var viewMat = camera.LookAt();
var modelMat = RotateAxis( IdentM44(), Fract( deltaMS / 13000.0 ) * 2.0 * Math.PI, 0 );
modelMat = RotateAxis( modelMat, Fract( deltaMS / 17000.0 ) * 2.0 * Math.PI, 1 );
// set up draw shader
ShProg.Use( progDraw );
ShProg.SetM44( progDraw, "u_projectionMat44", prjMat );
ShProg.SetM44( progDraw, "u_modelViewMat44", Multiply(viewMat, modelMat) );
ShProg.SetF1( progDraw, "u_shininess", shininess );
// draw scene
VertexBuffer.Draw( bufCube );
// set blur-X framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[1] );
gl.viewport( 0, 0, blurFB[1].width, blurFB[1].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
var texUnit = 1;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[0].color0_texture );
// set up blur-X shader
ShProg.Use( progBlur );
ShProg.SetI1( progBlur, "u_texture", texUnit )
ShProg.SetF2( progBlur, "u_textureSize", vp_size );
ShProg.SetF1( progBlur, "u_sigma", sigma )
ShProg.SetF1( progBlur, "u_radius", radius )
ShProg.SetF2( progBlur, "u_dir", [1.0, 0.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
// reset framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texUnit = 2;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[1].color0_texture );
// set up post process shader
ShProg.SetI1( progBlur, "u_texture", texUnit )
ShProg.SetF1( progBlur, "u_radius", radius )
ShProg.SetF2( progBlur, "u_dir", [0.0, 1.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return null;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.progObj )
return null;
progDraw.inPos = gl.getAttribLocation( progDraw.progObj, "inPos" );
progDraw.inNV = gl.getAttribLocation( progDraw.progObj, "inNV" );
progDraw.inCol = gl.getAttribLocation( progDraw.progObj, "inCol" );
progBlur = ShProg.Create(
[ { source : "post-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "blur-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progBlur.inPos = gl.getAttribLocation( progBlur.progObj, "inPos" );
if ( !progBlur.progObj )
return;
// create cube
var cubePos = [
-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0 ];
var cubeCol = [ 1.0, 0.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 ];
var cubeHlpInx = [ 0, 1, 2, 3, 1, 5, 6, 2, 5, 4, 7, 6, 4, 0, 3, 7, 3, 2, 6, 7, 1, 0, 4, 5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) {
cubePosData.push( cubePos[cubeHlpInx[i]*3], cubePos[cubeHlpInx[i]*3+1], cubePos[cubeHlpInx[i]*3+2] );
}
var cubeNVData = [];
for ( var i1 = 0; i1 < cubeHlpInx.length; i1 += 4 ) {
var nv = [0, 0, 0];
for ( i2 = 0; i2 < 4; ++ i2 ) {
var i = i1 + i2;
nv[0] += cubePosData[i*3]; nv[1] += cubePosData[i*3+1]; nv[2] += cubePosData[i*3+2];
}
for ( i2 = 0; i2 < 4; ++ i2 )
cubeNVData.push( nv[0], nv[1], nv[2] );
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) {
cubeColData.push( cubeCol[is*3], cubeCol[is*3+1], cubeCol[is*3+2] );
}
}
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) {
cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
}
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol } ],
cubeInxData );
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
camera = new Camera( [0, 3, 0.0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // nearest power of 2
blurFB = [];
for ( var i = 0; i < 2; ++ i ) {
fb = gl.createFramebuffer();
fb.width = fbsize;
fb.height = fbsize;
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
blurFB.push( fb );
}
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, intervall ) {
return Fract( deltaTime / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, intervall, range ) {
var pos = Fract( deltaTime / (1000*intervall) ) * 2.0
var pos = pos < 1.0 ? pos : (2.0-pos)
return range[0] + (range[1] - range[0]) * pos;
}
function EllipticalPosition( a, b, angRag ) {
var a_b = a * a - b * b
var ea = (a_b <= 0) ? 0 : Math.sqrt( a_b );
var eb = (a_b >= 0) ? 0 : Math.sqrt( -a_b );
return [ a * Math.sin( angRag ) - ea, b * Math.cos( angRag ) - eb, 0 ];
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
};
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc, no_of: attribs[i].data.length/attribs[i].attrSize });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ARRAY_BUFFER, null);
if ( buffer.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
if ( bufObj.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
}
else
gl.drawArrays(bufObj.primitive_type, 0, bufObj.attr[0].no_of );
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform mat4 u_projectionMat44;
uniform mat4 u_modelViewMat44;
void main()
{
vertNV = mat3( u_modelViewMat44 ) * normalize( inNV );
vertCol = inCol;
vec4 pos = u_modelViewMat44 * vec4( inPos, 1.0 );
vertPos = pos.xyz / pos.w;
gl_Position = u_projectionMat44 * pos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform float u_shininess;
void main()
{
vec3 color = vertCol;
vec3 normalV = normalize( vertNV );
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + normalV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float shineFac = ( u_shininess + 2.0 ) * pow( NdotH, u_shininess ) / ( 2.0 * 3.14159265 );
gl_FragColor = vec4( color.rgb * (0.2 + NdotH), 1.0 );
}
</script>
<script id="post-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="blur-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform float u_radius;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14159 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
</script>
<div>
<form id="gui" name="inputs">
<table>
<tr> <td> <font color= #CCF>radius</font> </td>
<td> <input type="range" id="radius" min="1" max="1000" value="200"/></td> </tr>
<tr> <td> <font color= #CCF>blur</font> </td>
<td> <input type="range" id="sigma" min="1" max="50" value="10"/></td> </tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;"></canvas>
In general you need to draw the scene you want to blur to a frame buffer object (FBO) with attached texture.
Create a frame buffer
Create an empty texture (data parameter should be null)
Bind frame buffer and texture
Attach the texture to frame buffer as color
At this point the rest of the drawing should be exactly the same as on your main buffer, but make sure you set up a correct viewport. This procedure makes you draw into the texture; a minimal sketch of these steps follows.
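A minimal sketch of these four steps, assuming an OpenGL ES 2.0 / C++ setup like in the question (the names fboScene and texScene and the fixed size are placeholders):
GLuint fboScene = 0, texScene = 0;
const GLsizei texW = 800, texH = 600;   // placeholder size, usually the window size
// 1. create a frame buffer
glGenFramebuffers(1, &fboScene);
// 2. create an empty texture (data parameter is NULL)
glGenTextures(1, &texScene);
// 3. bind frame buffer and texture
glBindFramebuffer(GL_FRAMEBUFFER, fboScene);
glBindTexture(GL_TEXTURE_2D, texScene);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texW, texH, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// 4. attach the texture to the frame buffer as color attachment
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texScene, 0);
// drawing now goes into texScene; the viewport has to match the texture size
glViewport(0, 0, texW, texH);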
Now that you have a texture containing your scene, you follow the same procedure as for blurring an image:
Bind your main buffer (usually indexed 0)
Bind texture
Draw the texture to the main buffer with the blur shader (sketched below)
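A rough sketch of this second phase, assuming the texScene texture from the sketch above and an already linked blur program blurProg whose fragment shader has a sampler2D uniform named u_texture (all of these names are placeholders):
// 1. bind the main (default) framebuffer, index 0, and restore the window viewport
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, windowWidth, windowHeight);   // placeholder window size
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// 2. bind the texture which now contains the rendered scene
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texScene);
// 3. draw a screen filling quad with the blur shader, sampling that texture
glUseProgram(blurProg);
glUniform1i(glGetUniformLocation(blurProg, "u_texture"), 0);   // texture unit 0
// set up the quad vertex attribute as for any other geometry, then:
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);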
You can then optimize it with separate horizontal and vertical blur shaders, making 2 passes and using yet another FBO...
So I would try doing some steps in your application:
Create a scene which draws and shows a triangle
Create an FBO, draw to it, and draw the FBO texture on your main buffer
Create a scene which draws and shows a blurred image through a texture
Create an FBO on which the scene is drawn, then draw the blurred texture from the FBO on the main buffer
If you find yourself in trouble at any of these points you might want to ask a specific question about it.
I am trying to get world coordinates from screen coordinates with OpenGL. I did some research, but the returned values are incoherent. In my world I have a surface at Z=0, and I want the 3D coordinates of a point on that surface, so with Z=0. Here is my research:
float cursZ = 1;
long posX, posY, posZ = 0;
int viewport[4];
viewport[0] = Originex;
viewport[1] = Originey;
viewport[2] = largeFen;
viewport[3] = hautFen;
CMatrix4 modelView;
CMatrix4 proj;
CMatrix4 A;
CMatrix4 out;
typeCoorDepl proche = 100.;
typeCoorDepl loin = largeFen + hautFen;
Renderer.GetMatrix(MAT_MODELVIEW, modelView);
Renderer.GetMatrix(MAT_PROJECTION, proj);
modelView.LookAt(TVector3T(sendAngleX, angleY, SendAngleZ), TVector3T(0, 0, 0));
switch (modeVue)// get MAT_PROJECTION
{
case 1: // mode isometrique rotation centree sur l'ecran
proj.OrthoOffCenter(-(largeFen / 2.0f), (hautFen / 2.0f), (largeFen / 2.0f), -(hautFen / 2.0f), proche, loin);
break;
case 2: // mode conique rotation centree sur l'ecran
proj.PerspectiveFOV((70.0*pi / 180) / 2, largeFen / hautFen, proche, loin);
break;
case 3: // mode isometrique rotation centree sur l'ecran
proj.OrthoOffCenter(-(largeFen / 2.0f), (hautFen / 2.0f), (largeFen / 2.0f), -(hautFen / 2.0f), proche, loin);
break;
case 4: // mode conique rotation centree sur l'objet et non sur l'ecran
proj.PerspectiveFOV((70.0*pi / 180), largeFen / hautFen, proche, loin);
break;
default:
break;
}
cursY = hautFen - cursY; // windows start from top-left as openGL from bottom-left
double posx, posy, posz;
float m[16];//, A[16];
float in[4];//, out[4];
A = proj*modelView;
A.Inverse();
in[0] = cursX;
in[1] = cursY;
in[2] = cursZ;
in[3] = 1.0;
/* Map x and y from window coordinates */
in[0] = (in[0] - viewport[0]) / viewport[2];
in[1] = (in[1] - viewport[1]) / viewport[3];
/* Map to range -1 to 1 */
in[0] = in[0] * 2 - 1;
in[1] = in[1] * 2 - 1;
in[2] = in[2] * 2 - 1;
CMatrix4 forTestIn;
forTestIn.a11 = in[0];
forTestIn.a12 = in[1];
forTestIn.a13 = in[2];
forTestIn.a14 = in[3];
out = A*forTestIn;
out.a11 /= out.a14;
out.a12 /= out.a14;
out.a13 /= out.a14;
*finPosX = out.a11;
*finPosY = out.a12;
*finPosZ = out.a13;
Here are my OrthoOffCenter and PerspectiveFOV functions:
inline void CMatrix4::OrthoOffCenter(typeCoorDepl Left, typeCoorDepl Top, typeCoorDepl Right, typeCoorDepl Bottom, typeCoorDepl Near, typeCoorDepl Far)
{
a11 = 2 / (Right - Left); a12 = 0.0f; a13 = 0.0f; a14 = (Left + Right) / (Left - Right);
a21 = 0.0f; a22 = 2 / (Top - Bottom); a23 = 0.0f; a24 = (Bottom + Top) / (Bottom - Top);
// a31 = 0.0f; a32 = 0.0f; a33 = -2/(Far - Near); a34 = (Near + Far) / (Far - Near);
// substitution Far/Near cause pb depth buffer
a31 = 0.0f; a32 = 0.0f; a33 = -2/(Near - Far); a34 = (Far + Near) / (Near - Far);
a41 = 0.0f; a42 = 0.0f; a43 = 0.0f; a44 = 1.0f;
}
inline void CMatrix4::PerspectiveFOV(typeCoorDepl Fov, typeCoorDepl Ratio, typeCoorDepl Near, typeCoorDepl Far)
{
typeCoorDepl YScale = 1.0f / std::tan(Fov / 2);
typeCoorDepl XScale = YScale / Ratio;
typeCoorDepl Coeff = Far / (Far - Near);
a11 = XScale; a12 = 0.0f; a13 = 0.0f; a14 = 0.0f;
a21 = 0.0f; a22 = YScale; a23 = 0.0f; a24 = 0.0f;
a31 = 0.0f; a32 = 0.0f; a33 = Coeff; a34 = Near * -Coeff;
a41 = 0.0f; a42 = 0.0f; a43 = 1.0f; a44 = 0.0f;
// nb : agir sur a14, a24 déplace le centre "de rotation"
}
If you draw something on the viewport the following transformations are performed:
Transformation by the modelview matrix (which may consist of a separate model matrix and view matrix).
Transformation by the projection matrix (regardless of whether you have perspective or orthographic projection).
After these transformations the coordinates are in Normalized Device Coordinate (NDC) space. The NDC space ranges from (-1,-1,-1) to (1,1,1).
If you want to convert a point back from Normalized Device Coordinates (NDC) to world space, you have to apply exactly the reverse transformations (a small sketch combining these steps follows the helper code below):
Transform back by the inverse projection matrix.
Transform back by the inverse modelview matrix.
See the answers to the Stack Overflow question inverting a 4x4 matrix to see how to calculate an inverse matrix:
#include <array>     // std::array, used by mat44_inverse and the helper below
#include <cstring>   // std::memcpy, used by the helper below

// note: the _TMat44Helper struct further below has to be declared before this function
template < typename T_MD, typename T_MS = T_MD, typename T_SCALAR = float >
void mat44_inverse( T_MD &res, const T_MS &m )
{
T_SCALAR Coef00 = (T_SCALAR)(m[2][2] * m[3][3] - m[3][2] * m[2][3]);
T_SCALAR Coef02 = (T_SCALAR)(m[1][2] * m[3][3] - m[3][2] * m[1][3]);
T_SCALAR Coef03 = (T_SCALAR)(m[1][2] * m[2][3] - m[2][2] * m[1][3]);
T_SCALAR Coef04 = (T_SCALAR)(m[2][1] * m[3][3] - m[3][1] * m[2][3]);
T_SCALAR Coef06 = (T_SCALAR)(m[1][1] * m[3][3] - m[3][1] * m[1][3]);
T_SCALAR Coef07 = (T_SCALAR)(m[1][1] * m[2][3] - m[2][1] * m[1][3]);
T_SCALAR Coef08 = (T_SCALAR)(m[2][1] * m[3][2] - m[3][1] * m[2][2]);
T_SCALAR Coef10 = (T_SCALAR)(m[1][1] * m[3][2] - m[3][1] * m[1][2]);
T_SCALAR Coef11 = (T_SCALAR)(m[1][1] * m[2][2] - m[2][1] * m[1][2]);
T_SCALAR Coef12 = (T_SCALAR)(m[2][0] * m[3][3] - m[3][0] * m[2][3]);
T_SCALAR Coef14 = (T_SCALAR)(m[1][0] * m[3][3] - m[3][0] * m[1][3]);
T_SCALAR Coef15 = (T_SCALAR)(m[1][0] * m[2][3] - m[2][0] * m[1][3]);
T_SCALAR Coef16 = (T_SCALAR)(m[2][0] * m[3][2] - m[3][0] * m[2][2]);
T_SCALAR Coef18 = (T_SCALAR)(m[1][0] * m[3][2] - m[3][0] * m[1][2]);
T_SCALAR Coef19 = (T_SCALAR)(m[1][0] * m[2][2] - m[2][0] * m[1][2]);
T_SCALAR Coef20 = (T_SCALAR)(m[2][0] * m[3][1] - m[3][0] * m[2][1]);
T_SCALAR Coef22 = (T_SCALAR)(m[1][0] * m[3][1] - m[3][0] * m[1][1]);
T_SCALAR Coef23 = (T_SCALAR)(m[1][0] * m[2][1] - m[2][0] * m[1][1]);
_TMat44Helper<T_SCALAR> Fac0(Coef00, Coef00, Coef02, Coef03);
_TMat44Helper<T_SCALAR> Fac1(Coef04, Coef04, Coef06, Coef07);
_TMat44Helper<T_SCALAR> Fac2(Coef08, Coef08, Coef10, Coef11);
_TMat44Helper<T_SCALAR> Fac3(Coef12, Coef12, Coef14, Coef15);
_TMat44Helper<T_SCALAR> Fac4(Coef16, Coef16, Coef18, Coef19);
_TMat44Helper<T_SCALAR> Fac5(Coef20, Coef20, Coef22, Coef23);
_TMat44Helper<T_SCALAR> Vec0((T_SCALAR)m[1][0], (T_SCALAR)m[0][0], (T_SCALAR)m[0][0], (T_SCALAR)m[0][0]);
_TMat44Helper<T_SCALAR> Vec1((T_SCALAR)m[1][1], (T_SCALAR)m[0][1], (T_SCALAR)m[0][1], (T_SCALAR)m[0][1]);
_TMat44Helper<T_SCALAR> Vec2((T_SCALAR)m[1][2], (T_SCALAR)m[0][2], (T_SCALAR)m[0][2], (T_SCALAR)m[0][2]);
_TMat44Helper<T_SCALAR> Vec3((T_SCALAR)m[1][3], (T_SCALAR)m[0][3], (T_SCALAR)m[0][3], (T_SCALAR)m[0][3]);
_TMat44Helper<T_SCALAR> Inv0(Vec1 * Fac0 - Vec2 * Fac1 + Vec3 * Fac2);
_TMat44Helper<T_SCALAR> Inv1(Vec0 * Fac0 - Vec2 * Fac3 + Vec3 * Fac4);
_TMat44Helper<T_SCALAR> Inv2(Vec0 * Fac1 - Vec1 * Fac3 + Vec3 * Fac5);
_TMat44Helper<T_SCALAR> Inv3(Vec0 * Fac2 - Vec1 * Fac4 + Vec2 * Fac5);
_TMat44Helper<T_SCALAR> SignA((T_SCALAR)+1.0, (T_SCALAR)-1.0, (T_SCALAR)+1.0, (T_SCALAR)-1.0);
_TMat44Helper<T_SCALAR> SignB((T_SCALAR)-1.0, (T_SCALAR)+1.0, (T_SCALAR)-1.0, (T_SCALAR)+1.0);
std::array< _TMat44Helper<T_SCALAR>, 4 > Inverse{ Inv0 * SignA, Inv1 * SignB, Inv2 * SignA, Inv3 * SignB };
_TMat44Helper<T_SCALAR> Row0(Inverse[0][0], Inverse[1][0], Inverse[2][0], Inverse[3][0]);
_TMat44Helper<T_SCALAR> Dot0( Row0 );
Dot0 = Dot0 * _TMat44Helper<T_SCALAR>( (T_SCALAR)m[0][0], (T_SCALAR)m[0][1], (T_SCALAR)m[0][2], (T_SCALAR)m[0][3] );
T_SCALAR Dot1 = (Dot0[0] + Dot0[1]) + (Dot0[2] + Dot0[3]);
T_SCALAR OneOverDeterminant = static_cast<T_SCALAR>(1.0) / Dot1;
for ( int inx1 = 0; inx1 < 4; inx1 ++ )
{
for ( int inx2 = 0; inx2 < 4; inx2 ++ )
res[inx1][inx2] = Inverse[inx1][inx2] * OneOverDeterminant;
}
}
template< typename SCALAR_TYPE = float >
struct _TMat44Helper
{
_TMat44Helper( void ) {}
_TMat44Helper( SCALAR_TYPE x, SCALAR_TYPE y, SCALAR_TYPE z, SCALAR_TYPE w ) : m_v( { x, y, z, w } ){}
_TMat44Helper( const std::array< SCALAR_TYPE, 4 > & v ) : m_v( v ) {}
_TMat44Helper( const SCALAR_TYPE v[4] ) { std::memcpy( m_v.data(), v, 4 * sizeof( SCALAR_TYPE ) ); }
_TMat44Helper( const _TMat44Helper &src ) : m_v( src.m_v ) {}
_TMat44Helper & operator = ( const _TMat44Helper<SCALAR_TYPE> &src ) { m_v = src.m_v; return *this; }
SCALAR_TYPE & operator[](int inx) { return m_v[inx]; }
template< typename SCALAR_TYPE_OP >
_TMat44Helper<SCALAR_TYPE> operator *( SCALAR_TYPE_OP s )
{
_TMat44Helper res;
for ( int inx = 0; inx < 4; inx ++ )
res.m_v[inx] = m_v[inx] * (SCALAR_TYPE)s;
return res;
}
template< typename SCALAR_TYPE_OP >
_TMat44Helper<SCALAR_TYPE> operator +( const _TMat44Helper<SCALAR_TYPE_OP> &b )
{
_TMat44Helper<SCALAR_TYPE> res;
for ( int inx = 0; inx < 4; inx ++ )
res.m_v[inx] = m_v[inx] + (SCALAR_TYPE)b.m_v[inx];
return res;
}
template< typename SCALAR_TYPE_OP >
_TMat44Helper<SCALAR_TYPE> operator -( const _TMat44Helper<SCALAR_TYPE_OP> &b )
{
_TMat44Helper<SCALAR_TYPE> res;
for ( int inx = 0; inx < 4; inx ++ )
res.m_v[inx] = m_v[inx] - (SCALAR_TYPE)b.m_v[inx];
return res;
}
template< typename SCALAR_TYPE_OP >
_TMat44Helper<SCALAR_TYPE> operator *( const _TMat44Helper<SCALAR_TYPE_OP> &b )
{
_TMat44Helper<SCALAR_TYPE> res;
for ( int inx = 0; inx < 4; inx ++ )
res.m_v[inx] = m_v[inx] * (SCALAR_TYPE)b.m_v[inx];
return res;
}
std::array< SCALAR_TYPE, 4 > m_v;
};
To map a point from viewport (screen) coordinates to the NDC you have to map the X-coordinate and the Y-coordinate to the range (-1.0, 1.0).
The Z coordinate is more tricky: for this you have to access the depth buffer (the depth buffer can be rendered to a texture).
Read the depth at the XY position and convert it to the NDC:
X_ndc = X_screen * 2.0 / VP_sizeX - 1.0;
Y_ndc = Y_screen * 2.0 / VP_sizeY - 1.0;
Z_ndc = 2.0 * depth - 1.0;
In your case you have only a 2D geometry and an orthographic projection, so you do not have to care about the depth, because the Z coordinate is always 0.0.
If you transform a point by the projection matrix (or the inverse projection matrix), you get a point in a homogeneous coordinate system. To transform a point from a homogeneous coordinate system to a cartesian coordinate system, you have to divide its X, Y and Z coordinates by its weight (the W component).
A general transformation function of a point in a cartesian coordinate system with a homogeneous 4*4 matrix may look like this:
using TVec3 = std::array<float, 3>;
using TVec4 = std::array<float, 4>;
using TMat4 = std::array<TVec4, 4>;
TVec3 Transform( const TVec3 &vec, const TMat4 &mat )
{
TVec4 h {
vec[0] * mat[0][0] + vec[1] * mat[1][0] + vec[2] * mat[2][0] + mat[3][0],
vec[0] * mat[0][1] + vec[1] * mat[1][1] + vec[2] * mat[2][1] + mat[3][1],
vec[0] * mat[0][2] + vec[1] * mat[1][2] + vec[2] * mat[2][2] + mat[3][2],
vec[0] * mat[0][3] + vec[1] * mat[1][3] + vec[2] * mat[2][3] + mat[3][3]
};
if ( h[3] == 0.0 )
return TVec3{ 0.0f, 0.0f, 0.0f }; // division by zero, no valid result
return TVec3{ h[0]/h[3], h[1]/h[3], h[2]/h[3] };
}
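A small sketch that puts these pieces together for the case in the question (flat geometry in the Z=0 plane and an orthographic projection): it maps the window position to NDC, then applies the inverse projection matrix and the inverse view (modelview) matrix with the helpers above. The function name and parameters are made up for illustration, and the NDC Z is simply taken as 0.0, which is sufficient for this case.
// assumptions: matProj and matView are the matrices the scene was rendered with;
// xScreen/yScreen are window coordinates with the origin at the top left corner
TVec3 UnprojectOnZ0Plane( float xScreen, float yScreen,
                          float vpWidth, float vpHeight,
                          const TMat4 &matProj, const TMat4 &matView )
{
    // window position -> normalized device coordinates in [-1, 1]
    TVec3 ndc{ xScreen * 2.0f / vpWidth - 1.0f,
               1.0f - yScreen * 2.0f / vpHeight,  // flip Y: window Y runs top-down
               0.0f };                            // flat Z=0 geometry: no depth read needed

    // reverse the transformations in the opposite order: projection first, then view
    TMat4 invProj{}, invView{};
    mat44_inverse( invProj, matProj );
    mat44_inverse( invView, matView );

    TVec3 viewPos  = Transform( ndc, invProj );
    TVec3 worldPos = Transform( viewPos, invView );
    return worldPos;
}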
See the following WebGL example, where the color of the object under the mouse is found by transforming the screen coordinates back to the model coordinates.
<script type="text/javascript">
draw_vert =
"precision mediump float; \n" +
"attribute vec2 inPos; \n" +
"uniform mat4 u_projectionMat44;" +
"uniform mat4 u_modelViewMat44;" +
"void main()" +
"{" +
" vec4 viewPos = u_modelViewMat44 * vec4( inPos.xy, 0.0, 1.0 );" +
" gl_Position = u_projectionMat44 * viewPos;" +
"}";
draw_frag =
"precision mediump float; \n" +
"uniform vec3 u_color;" +
"void main()" +
"{" +
" gl_FragColor = vec4( u_color.xyz, 1.0 );" +
"}";
glArrayType = typeof Float32Array !="undefined" ? Float32Array : ( typeof WebGLFloatArray != "undefined" ? WebGLFloatArray : Array );
function IdentityMat44() {
var m = new glArrayType(16);
m[0] = 1; m[1] = 0; m[2] = 0; m[3] = 0;
m[4] = 0; m[5] = 1; m[6] = 0; m[7] = 0;
m[8] = 0; m[9] = 0; m[10] = 1; m[11] = 0;
m[12] = 0; m[13] = 0; m[14] = 0; m[15] = 1;
return m;
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Translate( matA, trans ) {
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var i = 0; i < 3; ++ i )
matB[12+i] = matA[i] * trans[0] + matA[4+i] * trans[1] + matA[8+i] * trans[2] + matA[12+i];
return matB;
}
function Scale( matA, scale ) {
var matB = new glArrayType(16);
for ( var i = 0; i < 16; ++ i ) matB[i] = matA[i];
for ( var a = 0; a < 4; ++ a )
for ( var i = 0; i < 3; ++ i )
matB[a*4+i] = matA[a*4+i] * scale[0];
return matB;
}
Ortho = function( l, r, t, b, n, f ) {
var fn = f + n;
var f_n = f - n;
var m = IdentityMat44();
m[0] = 2/(r-l); m[1] = 0; m[2] = 0; m[3] = 0;
m[4] = 0; m[5] = 2/(t-b); m[6] = 0; m[7] = 0;
m[8] = 0; m[9] = 0; m[10] = -2 / f_n; m[11] = -fn / f_n;
m[12] = 0; m[13] = 0; m[14] = 0; m[15] = 1;
return m;
}
vec4_add = function( a, b ) { return [ a[0]+b[0], a[1]+b[1], a[2]+b[2], a[3]+b[3] ]; }
vec4_sub = function( a, b ) { return [ a[0]-b[0], a[1]-b[1], a[2]-b[2], a[3]-b[3] ]; }
vec4_mul = function( a, b ) { return [ a[0]*b[0], a[1]*b[1], a[2]*b[2], a[3]*b[3] ]; }
vec4_scale = function( a, s ) { return [ a[0]*s, a[1]*s, a[2]*s, a[3]*s ]; }
mat44_inverse = function( m ) {
var Coef00 = m[2*4+2] * m[3*4+3] - m[3*4+2] * m[2*4+3];
var Coef02 = m[1*4+2] * m[3*4+3] - m[3*4+2] * m[1*4+3];
var Coef03 = m[1*4+2] * m[2*4+3] - m[2*4+2] * m[1*4+3];
var Coef04 = m[2*4+1] * m[3*4+3] - m[3*4+1] * m[2*4+3];
var Coef06 = m[1*4+1] * m[3*4+3] - m[3*4+1] * m[1*4+3];
var Coef07 = m[1*4+1] * m[2*4+3] - m[2*4+1] * m[1*4+3];
var Coef08 = m[2*4+1] * m[3*4+2] - m[3*4+1] * m[2*4+2];
var Coef10 = m[1*4+1] * m[3*4+2] - m[3*4+1] * m[1*4+2];
var Coef11 = m[1*4+1] * m[2*4+2] - m[2*4+1] * m[1*4+2];
var Coef12 = m[2*4+0] * m[3*4+3] - m[3*4+0] * m[2*4+3];
var Coef14 = m[1*4+0] * m[3*4+3] - m[3*4+0] * m[1*4+3];
var Coef15 = m[1*4+0] * m[2*4+3] - m[2*4+0] * m[1*4+3];
var Coef16 = m[2*4+0] * m[3*4+2] - m[3*4+0] * m[2*4+2];
var Coef18 = m[1*4+0] * m[3*4+2] - m[3*4+0] * m[1*4+2];
var Coef19 = m[1*4+0] * m[2*4+2] - m[2*4+0] * m[1*4+2];
var Coef20 = m[2*4+0] * m[3*4+1] - m[3*4+0] * m[2*4+1];
var Coef22 = m[1*4+0] * m[3*4+1] - m[3*4+0] * m[1*4+1];
var Coef23 = m[1*4+0] * m[2*4+1] - m[2*4+0] * m[1*4+1];
var Fac0 = [Coef00, Coef00, Coef02, Coef03];
var Fac1 = [Coef04, Coef04, Coef06, Coef07];
var Fac2 = [Coef08, Coef08, Coef10, Coef11];
var Fac3 = [Coef12, Coef12, Coef14, Coef15];
var Fac4 = [Coef16, Coef16, Coef18, Coef19];
var Fac5 = [Coef20, Coef20, Coef22, Coef23];
var Vec0 = [ m[1*4+0], m[0*4+0], m[0*4+0], m[0*4+0] ];
var Vec1 = [ m[1*4+1], m[0*4+1], m[0*4+1], m[0*4+1] ];
var Vec2 = [ m[1*4+2], m[0*4+2], m[0*4+2], m[0*4+2] ];
var Vec3 = [ m[1*4+3], m[0*4+3], m[0*4+3], m[0*4+3] ];
var Inv0 = vec4_add( vec4_sub( vec4_mul(Vec1, Fac0), vec4_mul(Vec2, Fac1) ), vec4_mul( Vec3, Fac2 ) );
var Inv1 = vec4_add( vec4_sub( vec4_mul(Vec0, Fac0), vec4_mul(Vec2, Fac3) ), vec4_mul( Vec3, Fac4 ) );
var Inv2 = vec4_add( vec4_sub( vec4_mul(Vec0, Fac1), vec4_mul(Vec1, Fac3) ), vec4_mul( Vec3, Fac5 ) );
var Inv3 = vec4_add( vec4_sub( vec4_mul(Vec0, Fac2), vec4_mul(Vec1, Fac4) ), vec4_mul( Vec2, Fac5 ) );
var SignA = [+1.0, -1.0, +1.0, -1.0];
var SignB = [-1.0, +1.0, -1.0, +1.0];
var Inverse = [ vec4_mul(Inv0, SignA), vec4_mul(Inv1, SignB), vec4_mul(Inv2, SignA), vec4_mul(Inv3, SignB) ];
var Row0 = [Inverse[0][0], Inverse[1][0], Inverse[2][0], Inverse[3][0] ];
var Dot0 = [Row0[0], Row0[1], Row0[2], Row0[3] ];
Dot0 = vec4_mul( Dot0, [ m[0], m[1], m[2], m[3] ] );
var Dot1 = (Dot0[0] + Dot0[1]) + (Dot0[2] + Dot0[3]);
var OneOverDeterminant = 1 / Dot1;
var res = IdentityMat44();
for ( var inx1 = 0; inx1 < 4; inx1 ++ ) {
for ( var inx2 = 0; inx2 < 4; inx2 ++ )
res[inx1*4+inx2] = Inverse[inx1][inx2] * OneOverDeterminant;
}
return res;
}
Transform = function(vec, mat) {
var h = [
vec[0] * mat[0*4+0] + vec[1] * mat[1*4+0] + vec[2] * mat[2*4+0] + mat[3*4+0],
vec[0] * mat[0*4+1] + vec[1] * mat[1*4+1] + vec[2] * mat[2*4+1] + mat[3*4+1],
vec[0] * mat[0*4+2] + vec[1] * mat[1*4+2] + vec[2] * mat[2*4+2] + mat[3*4+2],
vec[0] * mat[0*4+3] + vec[1] * mat[1*4+3] + vec[2] * mat[2*4+3] + mat[3*4+3] ]
if ( h[3] == 0.0 )
return [0, 0, 0]
return [ h[0]/h[3], h[1]/h[3], h[2]/h[3] ];
}
// shader program object
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList, uniformNames ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.unifomLocation = {};
for ( var i_n = 0; i_n < uniformNames.length; ++ i_n ) {
var name = uniformNames[i_n];
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.SetUniformInt = function( progObj, name, val ) { gl.uniform1i( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniform2f = function( progObj, name, arr ) { gl.uniform2fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniform3f = function( progObj, name, arr ) { gl.uniform3fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformMat44 = function( progObj, name, mat ) { gl.uniformMatrix4fv( progObj.unifomLocation[name], false, mat ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
function drawScene(){
var canvas = document.getElementById( "camera-canvas" );
var currentTime = Date.now();
var deltaMS = currentTime - startTime;
var aspect = canvas.width / canvas.height;
var matOrtho = Ortho( -aspect, aspect, 1, -1, -1, 1 );
var matOrthoInv = mat44_inverse( matOrtho )
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShaderProgram.Use( progDraw );
gl.enableVertexAttribArray( progDraw.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
ShaderProgram.SetUniformMat44( progDraw, "u_projectionMat44", matOrtho );
var col = [ [1.0,0.0,0.0], [1.0,1.0,0.0], [0.0,0.0,1.0] ];
var invMat = []
for ( var i = 0; i < 3; ++ i ) {
var modelMat = Scale( IdentityMat44(), [ 0.3, 0.3, 0.3] );
var angRad = CalcAng( currentTime, 20.0 ) + i * Math.PI * 2 / 3;
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
modelMat[12] = cosAng * 0.6;
modelMat[13] = sinAng * 0.6;
invMat.push( mat44_inverse( modelMat ) );
ShaderProgram.SetUniformMat44( progDraw, "u_modelViewMat44", modelMat );
ShaderProgram.SetUniform3f( progDraw, "u_color", col[i] );
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
}
gl.disableVertexAttribArray( progDraw.inPos );
var newColor = "#000000";
var colorMap = ["#ff0000", "#ffff00", "#0000ff" ];
var pos = [-1, -1];
if (mousePos[0] > 0 && mousePos[1] > 0 ) {
var pos = [2.0 * mousePos[0] / canvas.width - 1.0, 1.0 - 2.0 * mousePos[1] / canvas.height];
for ( var i = 0; i < 3; ++ i ) {
var testVec = [ pos[0], pos[1], 0 ];
testVec = Transform(testVec, matOrthoInv);
testVec = Transform(testVec, invMat[i]);
if (testVec[0] > -1.0 && testVec[0] < 1.0 && testVec[1] > -1.0 && testVec[1] < 1.0 ) {
newColor = colorMap[i];
}
}
}
document.getElementById( "color" ).value = newColor;
document.getElementById( "mouseX" ).innerHTML = pos[0];
document.getElementById( "mouseY" ).innerHTML = pos[1];
}
var startTime;
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( currentTime, intervall ) {
return Fract( (currentTime - startTime) / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( currentTime, intervall, range ) {
var pos = Fract( (currentTime - startTime) / (1000*intervall) ) * 2.0;
pos = pos < 1.0 ? pos : (2.0-pos);
return range[0] + (range[1] - range[0]) * pos;
}
var mousePos = [-1, -1];
var gl;
var prog;
var bufObj = {};
function cameraStart() {
var canvas = document.getElementById( "camera-canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
progDraw = ShaderProgram.Create(
[ { source : draw_vert, stage : gl.VERTEX_SHADER },
{ source : draw_frag, stage : gl.FRAGMENT_SHADER }
],
[ "u_projectionMat44", "u_modelViewMat44", "u_color"] );
if ( progDraw == 0 )
return;
progDraw.inPos = gl.getAttribLocation( progDraw, "inPos" );
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
startTime = Date.now();
setInterval(drawScene, 50);
}
(function() {
document.onmousemove = handleMouseMove;
function handleMouseMove(event) {
var dot, eventDoc, doc, body, pageX, pageY;
event = event || window.event; // IE-ism
if (event.pageX == null && event.clientX != null) {
eventDoc = (event.target && event.target.ownerDocument) || document;
doc = eventDoc.documentElement;
body = eventDoc.body;
event.pageX = event.clientX +
(doc && doc.scrollLeft || body && body.scrollLeft || 0) -
(doc && doc.clientLeft || body && body.clientLeft || 0);
event.pageY = event.clientY +
(doc && doc.scrollTop || body && body.scrollTop || 0) -
(doc && doc.clientTop || body && body.clientTop || 0 );
}
var canvas = document.getElementById( "camera-canvas");
var x = event.pageX - canvas.offsetLeft;
var y = event.pageY - canvas.offsetTop;
mousePos = [-1, -1];
if ( x >= 0 && x < canvas.width && y >= 0 && y < canvas.height ) {
mousePos = [x, y];
}
}
})();
</script>
<body onload="cameraStart();">
<div style="margin-left: 260px;">
<div style="float: right; width: 100%; background-color: #CCF;">
<form name="inputs">
<table>
<tr> <td> <input type="color" value="#000000" id="color" disabled></td> </tr>
<tr> <td> <span id="mouseX">0</span> </td> </tr>
<tr> <td> <span id="mouseY">0</span> </td> </tr>
</table>
</form>
</div>
<div style="float: right; width: 260px; margin-left: -260px;">
<canvas id="camera-canvas" style="border: none;" width="256" height="256"></canvas>
</div>
<div style="clear: both;"></div>
</div>
</body>
Background:
- I've been spending the last 6 weeks trying to learn DirectX 11 programming.
I'm currently rendering DDS images on-screen following this book:
http://www.amazon.co.uk/Beginning-Directx-11-Game-Programming/dp/1435458958/ref=sr_1_1?ie=UTF8&qid=1346510304&sr=8-1
I am trying to draw 2D sprites on top of a 2D background, which is itself also a 2D sprite. The problem concerns these two lines,
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendState_ );
d3dContext_->OMSetBlendState( alphaBlendState_, blendFactor, 0xFFFFFFFF );
which I use to enable alpha transparency in the DDS images. I like to have the option to blend sprites, but right now every sprite is blended with my blue background, resulting in blue logos, icons, etc.
How do I make the alpha blending ignore a specific image in DirectX 11?
Since the background sprite is the first to be drawn, I can't activate the blending later in the code when rendering the other sprites without every sprite turning blue.
thanks in advance :)
EDIT:
I've been working on this issue for the past 3 weeks and still have no solution. Every time the blend state is enabled on the device context, it seems to apply to every texture drawn on the screen, no matter the order of rendering.
Here is the source code of the header file:
class GameSpriteDemo : public Dx11DemoBase
{
public:
GameSpriteDemo( );
virtual ~GameSpriteDemo( );
bool LoadContent( );
void UnloadContent( );
void Update( float dt );
void Render( );
void DisableBlending();
ID3D11ShaderResourceView* getColorMap(int);
int UniqSpriteAmount();
// text rendering system
private:
bool DrawString( char* message, float startX, float startY );
private:
ID3D11VertexShader* solidColorVS_;
ID3D11VertexShader* solidColorVS_2;
ID3DX11Effect* effect_;
vector<ID3D11VertexShader*> solidColorVShaders;
ID3D11PixelShader* solidColorPS_;
ID3D11PixelShader* solidColorPS_2;
ID3D11InputLayout* inputLayout_;
ID3D11InputLayout* inputLayout2;
ID3D11Buffer* vertexBuffer_;
vector<ID3D11Buffer*> vertexBuffers;
ID3D11ShaderResourceView* colorMap_;
vector<ID3D11ShaderResourceView*> colorMaps;
ID3D11SamplerState* colorMapSampler_;
vector<ID3D11SamplerState*> colorSamplers;
ID3D11BlendState* alphaBlendState_;
ID3D11BlendState* alphaBlendStateOff;
vector<LPCTSTR> elements2D;
vector<GameSprite> sprites_;
ID3D11Buffer* mvpCB_;
XMMATRIX vpMatrix_;
};
#endif
Here is the source code of the .cpp file (the base class Dx11DemoBase is not needed since it only sets up the swap chain and the D3D context):
bool GameSpriteDemo::LoadContent( )
{
// add sprite textures to the engine, giving them index 0, 1, 2... etc.
elements2D.push_back("placeholder3.dds");
elements2D.push_back("cav.dds");
elements2D.push_back("Stats.dds");
// create sprites
XMFLOAT2 sprite1Pos( 0.0f, 500.0f);
GameSprite sprite1(getDevice(), "placeholder3.dds", 0, sprite1Pos, 0.0f);
XMFLOAT2 sprite1Scale( 0.5f, 0.5f );
XMFLOAT2 mini(0.8f, 0.6f);
sprite1.SetScale(mini);
sprites_.push_back(sprite1);
XMFLOAT2 sprite2Pos( 200.0f, 100.0f );
GameSprite sprite2(getDevice(), "Stats.dds", 2, sprite2Pos, 2.0f);
// sprites_.push_back(sprite2);
XMFLOAT2 sprite3Pos( 290.0f, 300.0f );
GameSprite sprite3(getDevice(), "cav.dds", 1, sprite3Pos, 2.0f);
sprite3.SetScale(sprite1Scale);
sprites_.push_back(sprite3);
// create vertex buffers, colorMaps and colorSamplers
for (int i = 0; i <= elements2D.size(); i++) {
solidColorVShaders.push_back(solidColorVS_);
colorMaps.push_back(colorMap_);
colorSamplers.push_back(colorMapSampler_);
vertexBuffers.push_back(vertexBuffer_);
}
// continue
ID3DBlob* vsBuffer = 0;
bool compileResult = CompileD3DShader( "TextureMap.fx", "VS_Main", "vs_4_0", &vsBuffer );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling the vertex shader!" );
return false;
}
HRESULT d3dResult;
d3dResult = d3dDevice_->CreateVertexShader( vsBuffer->GetBufferPointer( ),
vsBuffer->GetBufferSize( ), 0, &solidColorVShaders[0] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating the vertex shader!" );
if( vsBuffer )
vsBuffer->Release( );
return false;
}
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
unsigned int totalLayoutElements = ARRAYSIZE( solidColorLayout );
d3dResult = d3dDevice_->CreateInputLayout( solidColorLayout, totalLayoutElements,
vsBuffer->GetBufferPointer( ), vsBuffer->GetBufferSize( ), &inputLayout_ );
vsBuffer->Release( );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating the input layout!" );
return false;
}
ID3DBlob* psBuffer = 0;
compileResult = CompileD3DShader( "TextureMap.fx", "PS_Main", "ps_4_0", &psBuffer );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling pixel shader!" );
return false;
}
d3dResult = d3dDevice_->CreatePixelShader( psBuffer->GetBufferPointer( ),
psBuffer->GetBufferSize( ), 0, &solidColorPS_ );
psBuffer->Release( );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating pixel shader!" );
return false;
}
// create ColorMap, ColorSampler and VertexBuffer for each unique sprite
D3D11_SAMPLER_DESC colorMapDesc;
D3D11_BUFFER_DESC vertexDesc;
D3D11_SUBRESOURCE_DATA resourceData;
for (int j = 0; j < elements2D.size(); j++) {
d3dResult = D3DX11CreateShaderResourceViewFromFile( d3dDevice_,
elements2D[j], 0, 0, &colorMaps[j], 0 );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to load the texture image!" );
return false;
}
ZeroMemory( &colorMapDesc, sizeof( colorMapDesc ) );
colorMapDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
colorMapDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
colorMapDesc.MaxLOD = D3D11_FLOAT32_MAX;
d3dResult = d3dDevice_->CreateSamplerState( &colorMapDesc, &colorSamplers[j] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create color map sampler state!" );
return false;
}
float Width = sprites_[j].getWidth();
float Height = sprites_[j].getHeight();
VertexPos vertices[] =
{
{ XMFLOAT3( Width, Height, 1.0f ), XMFLOAT2( 1.0f, 0.0f ) },
{ XMFLOAT3( Width, -Height, 1.0f ), XMFLOAT2( 1.0f, 1.0f ) },
{ XMFLOAT3( -Width, -Height, 1.0f ), XMFLOAT2( 0.0f, 1.0f ) },
{ XMFLOAT3( -Width, -Height, 1.0f ), XMFLOAT2( 0.0f, 1.0f ) },
{ XMFLOAT3( -Width, Height, 1.0f ), XMFLOAT2( 0.0f, 0.0f ) },
{ XMFLOAT3( Width, Height, 1.0f ), XMFLOAT2( 1.0f, 0.0f ) },
};
ZeroMemory( &vertexDesc, sizeof( vertexDesc ) );
vertexDesc.Usage = D3D11_USAGE_DEFAULT;
vertexDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexDesc.ByteWidth = sizeof( VertexPos ) * 6;
ZeroMemory( &resourceData, sizeof( resourceData ) );
resourceData.pSysMem = vertices;
d3dResult = d3dDevice_->CreateBuffer( &vertexDesc, &resourceData, &vertexBuffers[j] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create vertex buffer!" );
return false;
}
}
// end of for loop
// create text fonts
d3dResult = D3DX11CreateShaderResourceViewFromFile( d3dDevice_,
"font.dds", 0, 0, &colorMap_, 0 );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to load the texture image!" );
return false;
}
D3D11_SAMPLER_DESC colorMapDesc2;
ZeroMemory( &colorMapDesc2, sizeof( colorMapDesc2 ) );
colorMapDesc2.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.ComparisonFunc = D3D11_COMPARISON_NEVER;
colorMapDesc2.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
colorMapDesc2.MaxLOD = D3D11_FLOAT32_MAX;
d3dResult = d3dDevice_->CreateSamplerState( &colorMapDesc2, &colorMapSampler_ );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create color map sampler state!" );
return false;
}
D3D11_BUFFER_DESC vertexDesc2;
ZeroMemory( &vertexDesc2, sizeof( vertexDesc2 ) );
vertexDesc2.Usage = D3D11_USAGE_DYNAMIC;
vertexDesc2.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
vertexDesc2.BindFlags = D3D11_BIND_VERTEX_BUFFER;
const int sizeOfSprite = sizeof( VertexPos ) * 6;
const int maxLetters = 24;
vertexDesc2.ByteWidth = sizeOfSprite * maxLetters;
d3dResult = d3dDevice_->CreateBuffer( &vertexDesc2, 0, &vertexBuffer_ );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create vertex buffer!" );
return false;
}
// end of font creation
D3D11_BUFFER_DESC constDesc;
ZeroMemory( &constDesc, sizeof( constDesc ) );
constDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
constDesc.ByteWidth = sizeof( XMMATRIX );
constDesc.Usage = D3D11_USAGE_DEFAULT;
d3dResult = d3dDevice_->CreateBuffer( &constDesc, 0, &mvpCB_ );
if( FAILED( d3dResult ) )
{
return false;
}
XMMATRIX view = XMMatrixIdentity( );
XMMATRIX projection = XMMatrixOrthographicOffCenterLH( 0.0f, 1366.0f, 0.0f, 768.0f, 0.1f, 100.0f );
vpMatrix_ = XMMatrixMultiply( view, projection );
D3D11_BLEND_DESC blendDesc;
ZeroMemory( &blendDesc, sizeof( blendDesc ) );
blendDesc.RenderTarget[0].BlendEnable = TRUE;
blendDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
blendDesc.RenderTarget[0].DestBlend = D3D11_BLEND_ONE;
blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ZERO;
blendDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
float blendFactor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendState_ );
// create the disabled blend state
blendDesc.RenderTarget[0].BlendEnable = FALSE;
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendStateOff );
d3dContext_->OMSetBlendState( alphaBlendState_, blendFactor, 0xFFFFFFFF );
// create the shaders for the text rendering
ID3DBlob* vsBuffer2 = 0;
compileResult = CompileD3DShader( "TextMap.fx", "VS_Main", "vs_4_0", &vsBuffer2 );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling the vertex shader!" );
return false;
}
d3dResult = d3dDevice_->CreateVertexShader( vsBuffer2->GetBufferPointer( ),
vsBuffer2->GetBufferSize( ), 0, &solidColorVS_2 );
ID3DBlob* psBuffer2 = 0;
compileResult = CompileD3DShader( "TextureMap.fx", "PS_Main", "ps_4_0", &psBuffer2 );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling pixel shader!" );
return false;
}
d3dResult = d3dDevice_->CreatePixelShader( psBuffer2->GetBufferPointer( ),
psBuffer2->GetBufferSize( ), 0, &solidColorPS_2 );
psBuffer2->Release( );
return true;
}
void GameSpriteDemo::UnloadContent( )
{
if( colorMapSampler_ ) colorMapSampler_->Release( );
if( colorMap_ ) colorMap_->Release( );
if( solidColorVShaders[0] ) solidColorVShaders[0]->Release( );
if( solidColorPS_ ) solidColorPS_->Release( );
if( inputLayout_ ) inputLayout_->Release( );
if( vertexBuffer_ ) vertexBuffer_->Release( );
if( mvpCB_ ) mvpCB_->Release( );
if( alphaBlendState_ ) alphaBlendState_->Release( );
if( alphaBlendStateOff ) alphaBlendStateOff->Release( );
colorMapSampler_ = 0;
colorMap_ = 0;
solidColorVShaders[0] = 0;
solidColorPS_ = 0;
inputLayout_ = 0;
vertexBuffer_ = 0;
mvpCB_ = 0;
alphaBlendState_ = 0;
alphaBlendStateOff = 0;
}
void GameSpriteDemo::Update( float dt )
{
// Nothing to update
}
bool GameSpriteDemo::DrawString( char* message, float startX, float startY )
{
// Size in bytes for a single sprite.
const int sizeOfSprite = sizeof( VertexPos ) * 6;
// Demo's dynamic buffer setup for max of 24 letters.
const int maxLetters = 24;
int length = strlen( message );
// Clamp for strings too long.
if( length > maxLetters )
length = maxLetters;
// Char's width on screen.
float charWidth = 32.0f / (1366.0f*1.0f);
// Char's height on screen.
float charHeight = 32.0f / (768.0f*1.0f);
// Char's texel width.
float texelWidth = 32.0f / 896.0f;
// verts per-triangle (3) * total triangles (2) = 6.
const int verticesPerLetter = 6;
D3D11_MAPPED_SUBRESOURCE mapResource;
HRESULT d3dResult = d3dContext_->Map( vertexBuffer_, 0, D3D11_MAP_WRITE_DISCARD, 0, &mapResource );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to map resource!" );
return false;
}
// Point to our vertex buffer's internal data.
VertexPos *spritePtr = ( VertexPos* )mapResource.pData;
const int indexA = static_cast<char>( 'A' );
const int indexZ = static_cast<char>( 'Z' );
const int index_a = static_cast<char>( 'a' );
for( int i = 0; i < length; ++i )
{
float thisStartX = startX + ( charWidth * static_cast<float>( i ) );
float thisEndX = thisStartX + charWidth;
float thisEndY = startY + charHeight;
spritePtr[0].pos = XMFLOAT3( thisEndX, thisEndY, 1.0f );
spritePtr[1].pos = XMFLOAT3( thisEndX, startY, 1.0f );
spritePtr[2].pos = XMFLOAT3( thisStartX, startY, 1.0f );
spritePtr[3].pos = XMFLOAT3( thisStartX, startY, 1.0f );
spritePtr[4].pos = XMFLOAT3( thisStartX, thisEndY, 1.0f );
spritePtr[5].pos = XMFLOAT3( thisEndX, thisEndY, 1.0f );
int texLookup = 0;
int letter = static_cast<char>( message[i] );
if (letter == index_a) {
texLookup = (indexZ - indexA) + 2;
}
else if( letter < indexA || letter > indexZ )
{
// Grab one index past Z, which is a blank space in the texture.
texLookup = ( indexZ - indexA ) + 1;
}
else
{
// A = 0, B = 1, Z = 25, etc.
texLookup = ( letter - indexA );
}
float tuStart = 0.0f + ( texelWidth * static_cast<float>( texLookup ) );
float tuEnd = tuStart + texelWidth;
spritePtr[0].tex0 = XMFLOAT2( tuEnd, 0.0f );
spritePtr[1].tex0 = XMFLOAT2( tuEnd, 1.0f );
spritePtr[2].tex0 = XMFLOAT2( tuStart, 1.0f );
spritePtr[3].tex0 = XMFLOAT2( tuStart, 1.0f );
spritePtr[4].tex0 = XMFLOAT2( tuStart, 0.0f );
spritePtr[5].tex0 = XMFLOAT2( tuEnd, 0.0f );
spritePtr += 6;
}
d3dContext_->Unmap( vertexBuffer_, 0 );
d3dContext_->Draw( 6 * length, 0 );
return true;
}
void GameSpriteDemo::Render( )
{
if( d3dContext_ == 0 )
return;
float clearColor[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
d3dContext_->ClearRenderTargetView( backBufferTarget_, clearColor );
unsigned int stride = sizeof( VertexPos );
unsigned int offset = 0;
d3dContext_->IASetInputLayout( inputLayout_ );
d3dContext_->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST );
d3dContext_->VSSetShader( solidColorVShaders[0], 0, 0 );
d3dContext_->PSSetShader( solidColorPS_, 0, 0 );
XMMATRIX world;
XMMATRIX mvp;
int index;
for (int k = 0; k < sprites_.size(); k++) {
index = sprites_[k].getIndex();
d3dContext_->IASetVertexBuffers( 0, 1, &vertexBuffers[index], &stride, &offset );
d3dContext_->PSSetShaderResources( 0, 1, &colorMaps[index] );
d3dContext_->PSSetSamplers( 0, 1, &colorSamplers[index] );
world = sprites_[k].GetWorldMatrix();
mvp = XMMatrixMultiply( world, vpMatrix_ );
mvp = XMMatrixTranspose( mvp );
d3dContext_->UpdateSubresource( mvpCB_, 0, 0, &mvp, 0, 0 );
d3dContext_->VSSetConstantBuffers( 0, 1, &mvpCB_ );
d3dContext_->Draw( 6, 0 );
}
// render text
DisableBlending();
d3dContext_->IASetVertexBuffers( 0, 1, &vertexBuffer_, &stride, &offset );
d3dContext_->PSSetShaderResources( 0, 1, &colorMap_ );
d3dContext_->PSSetSamplers( 0, 1, &colorMapSampler_ );
d3dContext_->VSSetShader( solidColorVS_2, 0, 0 );
d3dContext_->PSSetShader( solidColorPS_2, 0, 0 );
DrawString( "PROTOTYPE TEXT", -0.2f, 0.0f );
swapChain_->Present( 0, 0 );
}
ID3D11ShaderResourceView* GameSpriteDemo::getColorMap(int index) {
return colorMaps[index];
}
// this functions disables the alpha blending
void GameSpriteDemo::DisableBlending() {
float blendFactor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
d3dContext_->OMSetBlendState( alphaBlendStateOff, blendFactor, 0xFFFFFFFF );
}
Screenshot of the results when drawing with blending enabled:
http://i49.tinypic.com/33kao9v.jpg
and here is the HLSL code for the two FX files I'm using (the first for the sprites, the second for the text)
TextureMap.fx
cbuffer cbChangesPerFrame : register( b0 )
{
matrix mvp_;
};
Texture2D colorMap_ : register( t0 );
SamplerState colorSampler_ : register( s0 );
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
};
struct PS_Input
{
float4 pos : SV_POSITION;
float2 tex0 : TEXCOORD0;
};
PS_Input VS_Main( VS_Input vertex )
{
PS_Input vsOut = ( PS_Input )0;
vsOut.pos = mul( vertex.pos, mvp_ );
vsOut.tex0 = vertex.tex0;
return vsOut;
}
float4 PS_Main( PS_Input frag ) : SV_TARGET
{
return colorMap_.Sample( colorSampler_, frag.tex0 );
}
TextMap.fx
Texture2D colorMap_ : register( t0 );
SamplerState colorSampler_ : register( s0 );
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
};
struct PS_Input
{
float4 pos : SV_POSITION;
float2 tex0 : TEXCOORD0;
};
PS_Input VS_Main( VS_Input vertex )
{
PS_Input vsOut = ( PS_Input )0;
vsOut.pos = vertex.pos;
vsOut.tex0 = vertex.tex0;
return vsOut;
}
float4 PS_Main( PS_Input frag ) : SV_TARGET
{
return colorMap_.Sample( colorSampler_, frag.tex0 );
}
I have wondered whether to use pixel shaders to perform the alpha blending for specific 2D images, but my first attempts have not worked at all.
Hope someone can find a solution somehow :)
and thanks again for the help.
Gaussian, box, radial, directional, motion blur, zoom blur, etc.
I read that a Gaussian blur can be broken down into passes that could be implemented in pixel shaders, but I couldn't find any samples.
Is it right to assume that any effect that depends on pixels other than the current one can't be implemented in a pixel shader?
You can implement all of them, as long as you are able to pass the needed information to the shader.
The trick, in these cases, is to perform multi-pass rendering. The final shader takes a certain number of samplers, which are the non-blurred sources, and uses them to compute the blurred values.
For example, using multiple textures it is possible to emulate effects based on the accumulation buffer.
To implement a Gaussian blur, render the scene into a framebuffer object with a texture attached to its color attachment. This is the first pass.
In the second pass, render a textured quad, where the texture is the one generated in the first step. Texture coordinates are passed from the vertex stage to the fragment stage and interpolated across the quad, so there is a texture coordinate for each fragment; apply an offset to each coordinate to fetch the texels around the underlying one, and perform the Gaussian blur.
General 'pipeline' for post-processing effects (a WebGL sketch of it follows below):
setRenderTarget(myRenderTarget); // or FBO in GL
drawAwesomeScene();
setDefaultRenderTarget(); // draw to screen...
blurShader.use();
// shader needs to know what is the size of one pixel on the screen
blurShader.uniform2f("texelSize", 1/screenW, 1/screenH);
// set the texture with scene rendered...
setRenderTargetTexture();
drawFullScreenQuad();
// other effects...
useful examples/tutorial for blur: http://www.gamerendering.com/2008/10/11/gaussian-blur-filter-shader/
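A rough WebGL sketch of the pipeline above could look like this; sceneFB, sceneTexture, blurProgram, drawAwesomeScene and drawFullScreenQuad are placeholders for objects and helpers the application is assumed to provide, and the uniform names are only illustrative:
function renderWithBlur(gl, sceneFB, sceneTexture, blurProgram, screenW, screenH) {
    // 1st pass: draw the scene into an off-screen framebuffer
    gl.bindFramebuffer(gl.FRAMEBUFFER, sceneFB);
    gl.viewport(0, 0, screenW, screenH);
    gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
    drawAwesomeScene();
    // 2nd pass: draw a full screen quad to the default framebuffer,
    // sampling the scene texture in the blur shader
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    gl.viewport(0, 0, screenW, screenH);
    gl.useProgram(blurProgram);
    gl.uniform2f(gl.getUniformLocation(blurProgram, "texelSize"), 1.0 / screenW, 1.0 / screenH);
    gl.activeTexture(gl.TEXTURE0);
    gl.bindTexture(gl.TEXTURE_2D, sceneTexture);
    gl.uniform1i(gl.getUniformLocation(blurProgram, "u_texture"), 0);
    drawFullScreenQuad();
}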
I implemented a generic convolution fragment shader (pixel shader)
#version 120
uniform sampler2D texUnit;
uniform float[9] conMatrix;
uniform float conWeight;
uniform vec2 conPixel;
void main(void)
{
vec4 color = vec4(0.0);
vec2 texCoord = gl_TexCoord[0].st;
vec2 offset = conPixel * 1.5;
vec2 start = texCoord - offset;
vec2 current = start;
for (int i = 0; i < 9; i++)
{
color += texture2D( texUnit, current ) * conMatrix[i];
current.x += conPixel.x;
if (i == 2 || i == 5) {
current.x = start.x;
current.y += conPixel.y;
}
}
gl_FragColor = color * conWeight;
}
For a Blur:
where conPixel is {1/screen width, 1/screen height}
where conMatrix is {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0}
where conWeight is 1.0 / 9.0
where texUnit is 0
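As a minimal sketch, assuming the shader is driven from WebGL/JavaScript, the values above could be set like this (prog is the linked convolution program, screenW and screenH the viewport size; a desktop OpenGL host would use the corresponding glUniform* calls):
gl.useProgram(prog);
gl.uniform1i(gl.getUniformLocation(prog, "texUnit"), 0);              // sampler on texture unit 0
gl.uniform2f(gl.getUniformLocation(prog, "conPixel"), 1.0 / screenW, 1.0 / screenH);
gl.uniform1fv(gl.getUniformLocation(prog, "conMatrix"),
    [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]);                   // 3x3 box kernel
gl.uniform1f(gl.getUniformLocation(prog, "conWeight"), 1.0 / 9.0);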
An often-seen way to achieve a blur effect for a scene is the Gaussian blur, implemented as a 2-pass post-process.
This is an approximation which first blurs along the X axis in the 1st pass and along the Y axis in the 2nd pass (or vice versa). This results in better performance for strong blurring.
The blur shader uses a normal (Gaussian) distribution. The same shader program is used for both passes, with an individual direction setting for each pass stored in the uniform vec2 u_dir. The strength of the blur effect can be varied with the uniform variable float u_sigma in the range [0.0, 1.0].
The scene is rendered to a frame buffer with a texture bound to its color plane. A screen-space pass uses that texture as input and blurs the output along the X axis; it writes to another frame buffer, again with a texture bound to its color plane. This second texture is the input for the final blur pass along the Y axis. The host side of these two passes is sketched below, followed by the shaders.
A detailed description of the blur algorithm can be found in the answer to the question OpenGL es 2.0 Gaussian blur on triangle or at LearnOpenGL - Gaussian blur.
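A condensed host-side sketch of the two passes, assuming fbScene and fbBlurX are framebuffers with color textures attached, drawScreenSpaceQuad draws a full-screen quad, and progBlur, sigma, vpWidth and vpHeight are set up as in the complete WebGL example further down:
gl.useProgram(progBlur);
gl.uniform1f(gl.getUniformLocation(progBlur, "u_sigma"), sigma);
gl.uniform2fv(gl.getUniformLocation(progBlur, "u_textureSize"), [vpWidth, vpHeight]);
gl.activeTexture(gl.TEXTURE0);   // the u_texture sampler defaults to unit 0
// 1st pass: blur the scene texture along the X axis into the intermediate framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fbBlurX);
gl.bindTexture(gl.TEXTURE_2D, fbScene.color0_texture);
gl.uniform2fv(gl.getUniformLocation(progBlur, "u_dir"), [1.0, 0.0]);
drawScreenSpaceQuad();
// 2nd pass: blur the intermediate texture along the Y axis to the default framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.bindTexture(gl.TEXTURE_2D, fbBlurX.color0_texture);
gl.uniform2fv(gl.getUniformLocation(progBlur, "u_dir"), [0.0, 1.0]);
drawScreenSpaceQuad();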
Blur Vertex shader
#version 330
in vec2 inPos;
out vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
Blur Fragment shader
#version 330
in vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14157 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
See also the answers to the following question:
Fast Gaussian blur at pause
OpenGL es 2.0 Gaussian blur on triangle
How to get a "Glow" shader effect in OpenGL ES 2.0?
See a WebGL example:
var readInput = true;
function changeEventHandler(event){
readInput = true;
}
(function loadscene() {
var resize, gl, progDraw, progBlur, vp_size, blurFB;
var canvas;
var camera;
var bufCube = {};
var bufQuad = {};
var shininess = 10.0;
var glow = 10.0;
var sigma = 0.8;
var radius = 1.0;
function render(deltaMS){
if ( readInput ) {
//readInput = false;
var sliderScale = 100;
sigma = document.getElementById( "sigma" ).value / sliderScale;
radius = document.getElementById( "radius" ).value / sliderScale;
}
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up framebuffer
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[0] );
gl.viewport( 0, 0, blurFB[0].width, blurFB[0].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// setup view projection and model
var prjMat = camera.Perspective();
var viewMat = camera.Orbit();
var modelMat = IdentM44();
modelMat = camera.AutoModelMatrix();
// set up draw shader
ShProg.Use( progDraw.prog );
ShProg.SetM44( progDraw.prog, "u_projectionMat44", prjMat );
ShProg.SetM44( progDraw.prog, "u_modelViewMat44", Multiply(viewMat, modelMat) );
ShProg.SetF1( progDraw.prog, "u_shininess", shininess );
// draw scene
VertexBuffer.Draw( bufCube );
// set blur-X framebuffer and bind framebuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[1] );
gl.viewport( 0, 0, blurFB[1].width, blurFB[1].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
var texUnit = 1;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[0].color0_texture );
// set up blur-X shader
ShProg.Use( progBlur.prog );
ShProg.SetI1( progBlur.prog, "u_texture", texUnit )
ShProg.SetF2( progBlur.prog, "u_textureSize", vp_size );
ShProg.SetF1( progBlur.prog, "u_sigma", sigma )
ShProg.SetF1( progBlur.prog, "u_radius", radius )
ShProg.SetF2( progBlur.prog, "u_dir", [1.0, 0.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
// reset framebuffer and bind framebuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texUnit = 2;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[1].color0_texture );
// set up post-process (blur-Y) shader
ShProg.SetI1( progBlur.prog, "u_texture", texUnit )
ShProg.SetF1( progBlur.prog, "u_radius", radius )
ShProg.SetF2( progBlur.prog, "u_dir", [0.0, 1.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return null;
progDraw = {}
progDraw.prog = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.prog )
return null;
progDraw.inPos = gl.getAttribLocation( progDraw.prog, "inPos" );
progDraw.inNV = gl.getAttribLocation( progDraw.prog, "inNV" );
progDraw.inCol = gl.getAttribLocation( progDraw.prog, "inCol" );
progBlur = {}
progBlur.prog = ShProg.Create(
[ { source : "post-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "blur-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progBlur.inPos = gl.getAttribLocation( progBlur.prog, "inPos" );
if ( !progBlur.prog )
return;
// create cube
var cubePos = [
-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0 ];
var cubeCol = [ 1.0, 0.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 ];
var cubeHlpInx = [ 0, 1, 2, 3, 1, 5, 6, 2, 5, 4, 7, 6, 4, 0, 3, 7, 3, 2, 6, 7, 1, 0, 4, 5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) {
cubePosData.push( cubePos[cubeHlpInx[i]*3], cubePos[cubeHlpInx[i]*3+1], cubePos[cubeHlpInx[i]*3+2] );
}
var cubeNVData = [];
for ( var i1 = 0; i1 < cubeHlpInx.length; i1 += 4 ) {
var nv = [0, 0, 0];
for ( i2 = 0; i2 < 4; ++ i2 ) {
var i = i1 + i2;
nv[0] += cubePosData[i*3]; nv[1] += cubePosData[i*3+1]; nv[2] += cubePosData[i*3+2];
}
for ( i2 = 0; i2 < 4; ++ i2 )
cubeNVData.push( nv[0], nv[1], nv[2] );
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) {
cubeColData.push( cubeCol[is*3], cubeCol[is*3+1], cubeCol[is*3+2] );
}
}
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) {
cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
}
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol } ],
cubeInxData );
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
camera = new Camera( [0, 3, 0.0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // largest power of 2 that is <= fbsize
blurFB = [];
for ( var i = 0; i < 2; ++ i ) {
fb = gl.createFramebuffer();
fb.width = fbsize;
fb.height = fbsize;
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
blurFB.push( fb );
}
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, intervall ) {
return Fract( deltaTime / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, intervall, range ) {
var pos = Fract( deltaTime / (1000*intervall) ) * 2.0;
pos = pos < 1.0 ? pos : (2.0-pos);
return range[0] + (range[1] - range[0]) * pos;
}
function EllipticalPosition( a, b, angRag ) {
var a_b = a * a - b * b
var ea = (a_b <= 0) ? 0 : Math.sqrt( a_b );
var eb = (a_b >= 0) ? 0 : Math.sqrt( -a_b );
return [ a * Math.sin( angRag ) - ea, b * Math.cos( angRag ) - eb, 0 ];
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.orbit_mat = this.current_orbit_mat = this.model_mat = this.current_model_mat = IdentM44();
this.mouse_drag = this.auto_spin = false;
this.auto_rotate = true;
this.mouse_start = [0, 0];
this.mouse_drag_axis = [0, 0, 0];
this.mouse_drag_angle = 0;
this.mouse_drag_time = 0;
this.drag_start_T = this.rotate_start_T = this.Time();
this.Ortho = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var w = this.vp[0];
var h = this.vp[1];
return [
2/w, 0, 0, 0,
0, 2/h, 0, 0,
0, 0, -2/f_n, 0,
0, 0, -fn/f_n, 1 ];
};
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.Orbit = function() {
return Multiply(this.LookAt(), this.OrbitMatrix());
};
this.OrbitMatrix = function() {
return (this.mouse_drag || (this.auto_rotate && this.auto_spin)) ? Multiply(this.current_orbit_mat, this.orbit_mat) : this.orbit_mat;
};
this.AutoModelMatrix = function() {
return this.auto_rotate ? Multiply(this.current_model_mat, this.model_mat) : this.model_mat;
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
var current_T = this.Time();
this.current_model_mat = IdentM44()
if (this.mouse_drag) {
this.current_orbit_mat = Rotate(IdentM44(), this.mouse_drag_angle, this.mouse_drag_axis);
} else if (this.auto_rotate) {
if (this.auto_spin ) {
if (this.mouse_drag_time > 0 ) {
var angle = this.mouse_drag_angle * (current_T - this.rotate_start_T) / this.mouse_drag_time;
this.current_orbit_mat = Rotate(IdentM44(), angle, this.mouse_drag_axis);
}
} else {
var auto_angle_x = Fract( (current_T - this.rotate_start_T) / 13000.0 ) * 2.0 * Math.PI;
var auto_angle_y = Fract( (current_T - this.rotate_start_T) / 17000.0 ) * 2.0 * Math.PI;
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_x, 0 );
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_y, 1 );
}
}
};
this.ChangeMotionMode = function(drag, spin, auto ) {
var new_drag = drag;
var new_auto = new_drag ? false : auto;
var new_spin = new_auto ? spin : false;
change = this.mouse_drag != new_drag || this.auto_rotate != new_auto || this.auto_spin != new_spin;
if (!change)
return;
if (new_drag && !this.mouse_drag) {
this.drag_start_T = this.Time();
this.mouse_drag_angle = 0.0;
this.mouse_drag_time = 0;
}
if (new_auto && !this.auto_rotate)
this.rotate_start_T = this.Time();
this.mouse_drag = new_drag;
this.auto_rotate = new_auto;
this.auto_spin = new_spin;
this.orbit_mat = Multiply(this.current_orbit_mat, this.orbit_mat);
this.current_orbit_mat = IdentM44();
this.model_mat = Multiply(this.current_model_mat, this.model_mat);
this.current_model_mat = IdentM44();
};
this.OnMouseDown = function( event ) {
var rect = gl.canvas.getBoundingClientRect();
if ( event.clientX < rect.left || event.clientX > rect.right ) return;
if ( event.clientY < rect.top || event.clientY > rect.bottom ) return;
if (event.button == 0) { // left button
this.mouse_start = [event.clientX, event.clientY];
this.ChangeMotionMode( true, false, false );
}
};
this.OnMouseUp = function( event ) {
if (event.button == 0) { // left button
this.ChangeMotionMode( false, true, true );
} else if (event.button == 1) {// middle button
this.ChangeMotionMode( false, false, !this.auto_rotate );
}
};
this.OnMouseMove = function( event ) {
var dx = (event.clientX-this.mouse_start[0]) / this.vp[0];
var dy = (event.clientY-this.mouse_start[1]) / this.vp[1];
var len = Math.sqrt(dx*dx + dy*dy);
if (this.mouse_drag && len > 0) {
this.mouse_drag_angle = Math.PI*len;
this.mouse_drag_axis = [dy/len, 0, -dx/len];
this.mouse_drag_time = this.Time() - this.drag_start_T;
}
};
this.domElement = document;
var cam = this;
//this.domElement.addEventListener( 'contextmenu', function(e) { event.preventDefault(); }, false );
this.domElement.addEventListener( 'mousedown', function(e) { cam.OnMouseDown(e) }, false );
this.domElement.addEventListener( 'mouseup', function(e) { cam.OnMouseUp(e) }, false );
this.domElement.addEventListener( 'mousemove', function(e) { cam.OnMouseMove(e) }, false );
//this.domElement.addEventListener( 'mousewheel', hid_events.onMouseWheel, false );
//this.domElement.addEventListener( 'DOMMouseScroll', hid_events.onMouseWheel, false ); // firefox
}
var ShProg = {};
ShProg.Create = function( shaderList ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.Compile( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.Link( shaderObjs )
if ( progObj != 0 ) {
progObj.attrInx = {};
var noOfAttributes = gl.getProgramParameter( progObj, gl.ACTIVE_ATTRIBUTES );
for ( var i_n = 0; i_n < noOfAttributes; ++ i_n ) {
var name = gl.getActiveAttrib( progObj, i_n ).name;
progObj.attrInx[name] = gl.getAttribLocation( progObj, name );
}
progObj.uniLoc = {};
var noOfUniforms = gl.getProgramParameter( progObj, gl.ACTIVE_UNIFORMS );
for ( var i_n = 0; i_n < noOfUniforms; ++ i_n ) {
var name = gl.getActiveUniform( progObj, i_n ).name;
progObj.uniLoc[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShProg.AttrI = function( progObj, name ) { return progObj.attrInx[name]; }
ShProg.UniformL = function( progObj, name ) { return progObj.uniLoc[name]; }
ShProg.Use = function( progObj ) { gl.useProgram( progObj ); }
ShProg.SetI1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1i( progObj.uniLoc[name], val ); }
ShProg.SetF1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1f( progObj.uniLoc[name], val ); }
ShProg.SetF2 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform2fv( progObj.uniLoc[name], arr ); }
ShProg.SetF3 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform3fv( progObj.uniLoc[name], arr ); }
ShProg.SetF4 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform4fv( progObj.uniLoc[name], arr ); }
ShProg.SetM33 = function( progObj, name, mat ) { if(progObj.uniLoc[name]) gl.uniformMatrix3fv( progObj.uniLoc[name], false, mat ); }
ShProg.SetM44 = function( progObj, name, mat ) { if(progObj.uniLoc[name]) gl.uniformMatrix4fv( progObj.uniLoc[name], false, mat ); }
ShProg.Compile = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
}
ShProg.Link = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : null;
}
var VertexBuffer = {
Create: function(attribs, indices) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitve_type ? bufObj.primitve_type : gl.TRIANGLES, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform mat4 u_projectionMat44;
uniform mat4 u_modelViewMat44;
void main()
{
vertNV = mat3( u_modelViewMat44 ) * normalize( inNV );
vertCol = inCol;
vec4 pos = u_modelViewMat44 * vec4( inPos, 1.0 );
vertPos = pos.xyz / pos.w;
gl_Position = u_projectionMat44 * pos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform float u_shininess;
void main()
{
vec3 color = vertCol;
vec3 normalV = normalize( vertNV );
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + normalV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float shineFac = ( u_shininess + 2.0 ) * pow( NdotH, u_shininess ) / ( 2.0 * 3.14159265 );
gl_FragColor = vec4( color.rgb * (0.2 + NdotH), 1.0 );
}
</script>
<script id="post-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="blur-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform float u_radius;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14157 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
</script>
<div>
<form id="gui" name="inputs">
<table>
<tr> <td> <font color= #CCF>radius</font> </td>
<td> <input type="range" id="radius" min="1" max="1000" value="350" onchange="changeEventHandler(event);"/></td> </tr>
<tr> <td> <font color= #CCF>blur</font> </td>
<td> <input type="range" id="sigma" min="1" max="100" value="5" onchange="changeEventHandler(event);"/></td> </tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;"></canvas>