Shader - Anti fisheye by "pulling" pixels - glsl

I would like to fix the distortion of this image:
Sorry for the quality, but this is the best example I could find.
I don't know if fixing this distortion is possible (I would like to have straight doors and a straight ceiling), but basically, instead of pushing the pixels outside of the image (red arrows), which adds a blur effect, I would like to do the opposite (green arrows) and pull the pixels towards the center.
If you have any idea, that would be awesome. Other solutions are welcome as well!

For the definition of the fisheye effect, you have to associate an angle (alpha) with the diagonal (d) of the viewport.
The diagonal of the viewport is the diameter of the circle which encloses the entire viewport.
The relation between the radius of the perimeter circle (r) and the angle (alpha) is:
r = tan(alpha / 2)
In the following, eye_angle corresponds to alpha and half_dist to r:
float half_angle = eye_angle/2.0;
float half_dist = tan(half_angle);
With the aspect ratio of the viewport (aspect) and the normalized device position of the fragment on the viewport (ndcPos), the position P can be calculated. In normalized device space, x and y are in the range [-1, 1]:
vec2 vp_scale = vec2(aspect, 1.0);
vec2 P = ndcPos * vp_scale;
For each point (P) on the viewport, the relative distance (rel_dist) to the center of the viewport in relation to the perimeter circle has to be calculated, as well as the relative position (rel_P) of the point in relation to the aspect ratio:
float vp_dia = length(vp_scale);
float rel_dist = length(P) / vp_dia;
vec2 rel_P = normalize(P) / normalize(vp_scale);
The fisheye effect is caused by the projection of a spherical surface onto a plane. To convert to and from the projection, the relation between the arc length and the distance to the center of the plane has to be found:
If the radius of a circle is 1, the length of an arc is equal to the angle of the arc segment in radians. So the relation between the distance to the point P and the angle beta is:
|P| / r = tan(beta)
beta = atan(|P| / r)
So the projection from the spherical surface to the plane is:
float beta = rel_dist * half_angle;
vec2 pos_prj = rel_P * tan(beta) / half_dist;
And the projection from the plane to the spherical surface is:
float beta = atan(rel_dist * half_dist);
vec2 pos_prj = rel_P * beta / half_angle;
See the following WebGL example, which uses a fragment shader where the algorithm is implemented. The angle alpha is set by the uniform variable u_angle.
If u_angle > 0.0, then the projection from the spherical surface to the plane is calculated.
If u_angle < 0.0, then the projection from the plane to the spherical surface is calculated.
(function loadscene() {
var canvas, gl, vp_size, texture, prog, bufObj = {};
function initScene() {
canvas = document.getElementById( "ogl-canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
texture = new Texture( "https://raw.githubusercontent.com/Rabbid76/graphics-snippets/master/resource/texture/supermario.jpg" );
texture.bound = false;
progDraw = gl.createProgram();
for (let i = 0; i < 2; ++i) {
let source = document.getElementById(i==0 ? "draw-shader-vs" : "draw-shader-fs").text;
let shaderObj = gl.createShader(i==0 ? gl.VERTEX_SHADER : gl.FRAGMENT_SHADER);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
let status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
gl.attachShader(progDraw, shaderObj);
gl.linkProgram(progDraw);
}
status = gl.getProgramParameter(progDraw, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(progDraw));
progDraw.inPos = gl.getAttribLocation(progDraw, "inPos");
progDraw.u_time = gl.getUniformLocation(progDraw, "u_time");
progDraw.u_resolution = gl.getUniformLocation(progDraw, "u_resolution");
progDraw.u_texture = gl.getUniformLocation(progDraw, "u_texture");
progDraw.u_angle = gl.getUniformLocation(progDraw, "u_angle");
gl.useProgram(progDraw);
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
gl.enableVertexAttribArray( progDraw.inPos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
vp_size = [window.innerWidth, window.innerHeight];
canvas.width = vp_size[0];
canvas.height = vp_size[1];
}
function render(deltaMS) {
scale = document.getElementById( "scale" ).value / 100 * 2.0 - 1.0;
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texture.bound = texture.bound || texture.bind( 0 );
gl.uniform1i(progDraw.u_texture, 0);
gl.uniform1f(progDraw.u_time, deltaMS/2000.0);
gl.uniform2f(progDraw.u_resolution, canvas.width, canvas.height);
gl.uniform1f(progDraw.u_angle, scale * Math.PI * 0.9);
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
requestAnimationFrame(render);
}
class Texture {
constructor( name, dflt ) {
let texture = this;
this.dflt = dflt || [128,128,128,255]
let image = { "cx": this.dflt.w || 1, "cy": this.dflt.h || 1, "plane": this.dflt.p || this.dflt };
this.size = [image.cx, image.cy];
this.dummyObj = Texture.createTexture2D( image, true )
this.image = new Image(64,64);
this.image.setAttribute('crossorigin', 'anonymous');
this.image.onload = function () {
let cx = 1 << 31 - Math.clz32(texture.image.naturalWidth);
if ( cx < texture.image.naturalWidth ) cx *= 2;
let cy = 1 << 31 - Math.clz32(texture.image.naturalHeight);
if ( cy < texture.image.naturalHeight ) cy *= 2;
var canvas = document.createElement( 'canvas' );
canvas.width = cx;
canvas.height = cy;
var context = canvas.getContext( '2d' );
context.drawImage( texture.image, 0, 0, canvas.width, canvas.height );
texture.textureObj = Texture.createTexture2D( canvas, true );
texture.size = [cx, cy];
}
this.image.src = name;
}
static createTexture2D( image, flipY ) {
let t = gl.createTexture();
gl.activeTexture( gl.TEXTURE0 );
gl.bindTexture( gl.TEXTURE_2D, t );
gl.pixelStorei( gl.UNPACK_FLIP_Y_WEBGL, flipY != undefined && flipY == true );
if ( image.cx && image.cy && image.plane )
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, image.cx, image.cy, 0, gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(image.plane) );
else
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT );
gl.bindTexture( gl.TEXTURE_2D, null );
return t;
}
bind( texUnit = 0 ) {
gl.activeTexture( gl.TEXTURE0 + texUnit );
if ( this.textureObj ) {
gl.bindTexture( gl.TEXTURE_2D, this.textureObj );
return true;
}
gl.bindTexture( gl.TEXTURE_2D, this.dummyObj );
return false;
}
};
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
uniform float u_time;
uniform vec2 u_resolution;
uniform float u_angle;
uniform sampler2D u_texture;
void main()
{
vec2 uv = gl_FragCoord.xy / u_resolution;
vec2 ndcPos = uv * 2.0 - 1.0;
float aspect = u_resolution.x / u_resolution.y;
float eye_angle = abs(u_angle);
float half_angle = eye_angle/2.0;
float half_dist = tan(half_angle);
vec2 vp_scale = vec2(aspect, 1.0);
vec2 P = ndcPos * vp_scale;
float vp_dia = length(vp_scale);
float rel_dist = length(P) / vp_dia;
vec2 rel_P = normalize(P) / normalize(vp_scale);
vec2 pos_prj = ndcPos;
if (u_angle > 0.0)
{
float beta = rel_dist * half_angle;
pos_prj = rel_P * tan(beta) / half_dist;
}
else if (u_angle < 0.0)
{
float beta = atan(rel_dist * half_dist);
pos_prj = rel_P * beta / half_angle;
}
vec2 uv_prj = pos_prj * 0.5 + 0.5;
vec2 rangeCheck = step(vec2(0.0), uv_prj) * step(uv_prj, vec2(1.0));
if (rangeCheck.x * rangeCheck.y < 0.5)
discard;
vec4 texColor = texture2D(u_texture, uv_prj.st);
gl_FragColor = vec4( texColor.rgb, 1.0 );
}
</script>
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
void main()
{
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<canvas id="ogl-canvas" style="border: none"></canvas>
<form id="gui" name="inputs">
<input type="range" id="scale" min="0" max="100" value="20"/>
</form>

This kind of distortion is a property of a camera. If you have access to the camera used to take the picture, you can calibrate it in order to extract the deformation it applies. Otherwise, you can still use some general equations. Have a look at Paul Bourke's article, this Stack Overflow answer or this Shader Toy example, to see how it could be done.
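If you cannot calibrate the camera, a rough sketch of such general equations is the radial ("Brown") distortion model shown below. This is only an illustration: the distortion center (cx, cy) and the coefficients k1, k2 are placeholders that a calibration would normally provide, not values derived from the image above.
struct Vec2 { float x; float y; };
// Sketch of a radial (Brown) distortion remap: for every pixel p of the corrected
// output image, compute where to sample in the distorted source image.
// In practice p is usually normalized (e.g. divided by the focal length) first.
Vec2 distortSample( Vec2 p, float cx, float cy, float k1, float k2 )
{
    float nx = p.x - cx;                       // offset from the distortion center
    float ny = p.y - cy;
    float r2 = nx * nx + ny * ny;              // squared distance to the center
    float f  = 1.0f + k1 * r2 + k2 * r2 * r2;  // radial scale factor
    return { cx + nx * f, cy + ny * f };       // position to sample in the source image
}
For every pixel of the corrected output you sample the source image at the returned position; depending on the sign of the coefficients this pulls the content towards the center or pushes it outwards.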

Related

How do I set up multiple viewports using Qt3D without having to use QML

I'd like to set up a multiple view port scene akin to https://doc.qt.io/archives/qt-5.10/qt3d-multiviewport-example.html
without having to use QML.
At the moment I've got a single view working with:
Qt3DExtras::Qt3DWindow* createView ( Qt3DCore::QEntity* rootEntity ) {
Qt3DExtras::Qt3DWindow* view = new Qt3DExtras::Qt3DWindow();
Qt3DRender::QCamera* camera = view->camera();
camera->lens()->setPerspectiveProjection ( 45.0f, 16.0f / 9.0f, 0.1f, 1000.0f );
camera->setPosition ( QVector3D ( 0, 0, 10.0f ) );
camera->setViewCenter ( QVector3D ( 0, 0, 0 ) );
Qt3DExtras::QOrbitCameraController* manipulator = new Qt3DExtras::QOrbitCameraController ( rootEntity );
manipulator->setLinearSpeed ( 5.0f );
manipulator->setLookSpeed ( 180.f );
manipulator->setZoomInLimit ( 5.0f );
manipulator->setCamera ( camera );
return view;
}
In essence, the question is how do I convert the QML code supplied in the example to C++.
You have to use the QViewport class and its setNormalizedRect function to specify the viewport rectangle within [0.0, 0.0, 1.0, 1.0].
Something like below.
//DECLARE A FRAME GRAPH
Qt3DRender::QFrameGraph *frameGraph = new Qt3DRender::QFrameGraph();
Qt3DRender::QTechniqueFilter *techniqueFilter = new Qt3DRender::QTechniqueFilter();
Qt3DRender::QViewport *viewport1 = new Qt3DRender::QViewport(techniqueFilter);
Qt3DRender::QViewport *viewport2 = new Qt3DRender::QViewport(techniqueFilter);
Qt3DRender::QViewport *viewport3 = new Qt3DRender::QViewport(techniqueFilter);
Qt3DRender::QViewport *viewport4 = new Qt3DRender::QViewport(techniqueFilter);
viewport1->setNormalizedRect(QRectF(0, 0, 0.5, 0.5));
viewport2->setNormalizedRect(QRectF(0.5, 0, 0.5, 0.5));
viewport3->setNormalizedRect(QRectF(0, 0.5, 0.5, 0.5));
viewport4->setNormalizedRect(QRectF(0.5, 0.5, 0.5, 0.5));
//SET ACTIVE FRAME GRAPH NODE TO YOUR WINDOW
your_qt3d_wndObject->setActiveFrameGraph(frameGraph->activeFrameGraph());
https://doc-snapshots.qt.io/qt5-5.9/qt3drender-qviewport.html
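In Qt versions where Qt3DRender::QFrameGraph no longer exists, the frame graph is built directly as a tree of QFrameGraphNode objects and set on the window. Below is a minimal two-viewport sketch of that approach; the camera pointers and the function name are assumptions and not taken from the question:
#include <Qt3DExtras/Qt3DWindow>
#include <Qt3DRender/QCamera>
#include <Qt3DRender/QRenderSurfaceSelector>
#include <Qt3DRender/QClearBuffers>
#include <Qt3DRender/QNoDraw>
#include <Qt3DRender/QViewport>
#include <Qt3DRender/QCameraSelector>
// Frame graph tree: SurfaceSelector -> { ClearBuffers -> NoDraw, Viewport -> CameraSelector, ... }
void setupFrameGraph( Qt3DExtras::Qt3DWindow* view,
                      Qt3DRender::QCamera* leftCamera,
                      Qt3DRender::QCamera* rightCamera )
{
    auto* surfaceSelector = new Qt3DRender::QRenderSurfaceSelector();
    surfaceSelector->setSurface( view );
    // clear once per frame; QNoDraw prevents this branch from rendering anything
    auto* clearBuffers = new Qt3DRender::QClearBuffers( surfaceSelector );
    clearBuffers->setBuffers( Qt3DRender::QClearBuffers::ColorDepthBuffer );
    new Qt3DRender::QNoDraw( clearBuffers );
    // one viewport + camera selector per sub-view
    auto* viewport1 = new Qt3DRender::QViewport( surfaceSelector );
    viewport1->setNormalizedRect( QRectF( 0.0, 0.0, 0.5, 1.0 ) );
    auto* cameraSelector1 = new Qt3DRender::QCameraSelector( viewport1 );
    cameraSelector1->setCamera( leftCamera );
    auto* viewport2 = new Qt3DRender::QViewport( surfaceSelector );
    viewport2->setNormalizedRect( QRectF( 0.5, 0.0, 0.5, 1.0 ) );
    auto* cameraSelector2 = new Qt3DRender::QCameraSelector( viewport2 );
    cameraSelector2->setCamera( rightCamera );
    view->setActiveFrameGraph( surfaceSelector );
}
The same pattern extends to four viewports by adding two more viewport/camera-selector pairs with the rectangles from the code above.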

OpenGL texture mapping not using image [closed]

I want to map textures to an object (maybe a cube).
But I don't want to use an image.
I know that an image is usually used for texture mapping like this:
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
image = SOIL_load_image("cube.jpg", &width, &height, 0, SOIL_LOAD_RGB);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
But is there any solution that does not use an image?
If there is a color spectrum, I want to store the color information in a buffer and map this spectrum like a texture.
Please let me know if there is any solution.
If you want to create a texture, you have to allocate the memory for the color plane first.
The easiest way is to create an RGBA texture, because exactly 4 bytes (32 bits) are needed for one texel and you do not have to worry about the alignment.
In C++ I recommend using a std::vector:
int width = ...;
int height = ...;
std::vector<unsigned char> colorPlane( width * height * 4 ); // * 4 because of RGBA
But you can also use old school dynamic memory allocation:
unsigned char *colorPlane = new unsigned char[ width * height * 4 ];
The byte index of a texel inside the plane is calculated as follows:
int posX = ...;
int posY = ...;
int index = (posY * width + posX) * 4;
If you want to set a pixel you have to assign the proper red, green and blue color channels in the range [0, 255]. For an opaque texel you have to set an alpha channel of 255:
e.g.: Set Red color:
colorPlane[index + 0] = 255; // red component
colorPlane[index + 1] = 0; // green component
colorPlane[index + 2] = 0; // blue component
colorPlane[index + 3] = 255; // alpha channel (255 == opaque)
Finally, you have to upload the color plane to the texture.
// std::vector<unsigned char>
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, colorPlane.data() );
// unsigned char*
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, colorPlane );
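Note that the upload assumes a texture object has been generated and bound, and that a minification filter which does not require mipmaps is set, as in the code from the question; a minimal sketch:
GLuint texName = 0;
glGenTextures( 1, &texName );
glBindTexture( GL_TEXTURE_2D, texName );
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR ); // no mipmaps are generated
glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
// upload the generated color plane (std::vector variant)
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, colorPlane.data() );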
You can create a gradient texture by interpolating the color components.
See the following example
for ( int iy = 0; iy < height; ++ iy )
{
for ( int ix = 0; ix < width; ++ ix )
{
int index = (iy * width + ix) * 4;
float gradX = (float)ix / width;
float gradY = (float)iy / height;
colorPlane[index + 0] = (unsigned char)(255.0 * (1.0-gradX));
colorPlane[index + 1] = (unsigned char)(255.0 * (1.0-gradY));
colorPlane[index + 2] = (unsigned char)(255.0 * gradX * gradY);
colorPlane[index + 3] = 255;
}
}
A rainbow texture can be created as follows:
for ( int iy = 0; iy < height; ++ iy )
{
for ( int ix = 0; ix < width; ++ ix )
{
int index = (iy * width + ix) * 4;
float H = 1.0f - (float)iy / height;
float R = fabs(H * 4.0f - 3.0f) - 1.0f;
float G = 2.0f - fabs(H * 4.0f - 2.0f);
float B = 2.0f - fabs(H * 4.0f - 4.0f);
colorPlane[index + 0] = (unsigned char)(255.0 * R);
colorPlane[index + 1] = (unsigned char)(255.0 * G);
colorPlane[index + 2] = (unsigned char)(255.0 * B);
colorPlane[index + 3] = 255;
}
}
See the following WebGL example:
var ShaderProgram = {};
ShaderProgram.Create = function( shaderList, uniformNames ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.CompileShader( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( shderObj == 0 )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.LinkProgram( shaderObjs )
if ( progObj != 0 ) {
progObj.unifomLocation = {};
for ( var i_n = 0; i_n < uniformNames.length; ++ i_n ) {
var name = uniformNames[i_n];
progObj.unifomLocation[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShaderProgram.Use = function( progObj ) { gl.useProgram( progObj ); }
ShaderProgram.SetUniformInt = function( progObj, name, val ) { gl.uniform1i( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniformFloat = function( progObj, name, val ) { gl.uniform1f( progObj.unifomLocation[name], val ); }
ShaderProgram.SetUniform2f = function( progObj, name, arr ) { gl.uniform2fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniform3f = function( progObj, name, arr ) { gl.uniform3fv( progObj.unifomLocation[name], arr ); }
ShaderProgram.SetUniformMat44 = function( progObj, name, mat ) { gl.uniformMatrix4fv( progObj.unifomLocation[name], false, mat ); }
ShaderProgram.CompileShader = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript) {
source = "";
var node = shaderScript.firstChild;
while (node) {
if (node.nodeType == 3) source += node.textContent;
node = node.nextSibling;
}
}
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : 0;
}
ShaderProgram.LinkProgram = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : 0;
}
function drawScene(){
var canvas = document.getElementById( "ogl-canvas" );
var vp = [canvas.width, canvas.height];
gl.viewport( 0, 0, canvas.width, canvas.height );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
ShaderProgram.Use( progDraw );
gl.enableVertexAttribArray( progDraw.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.vertexAttribPointer( progDraw.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.drawElements( gl.TRIANGLES, bufObj.inx.len, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progDraw.pos );
}
var gl;
var prog;
var bufObj = {};
function sceneStart() {
var canvas = document.getElementById( "ogl-canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return;
progDraw = ShaderProgram.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
],
[] );
progDraw.inPos = gl.getAttribLocation( progDraw, "inPos" );
if ( prog == 0 )
return;
var pos = [ -1, -1, 1, -1, 1, 1, -1, 1 ];
var inx = [ 0, 1, 2, 0, 2, 3 ];
bufObj.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufObj.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( pos ), gl.STATIC_DRAW );
bufObj.inx = gl.createBuffer();
bufObj.inx.len = inx.length;
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufObj.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( inx ), gl.STATIC_DRAW );
setInterval(drawScene, 50);
}
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 vertPos;
void main()
{
vertPos = inPos;
gl_Position = vec4( inPos.xy, 0.0, 1.0 );
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 vertPos;
vec3 HueToRGB(in float H)
{
float R = abs(H * 4.0 - 3.0) - 1.0;
float G = 2.0 - abs(H * 4.0 - 2.0);
float B = 2.0 - abs(H * 4.0 - 4.0);
return clamp( vec3(R,G,B), 0.0, 1.0 );
}
void main()
{
vec3 color = HueToRGB( vertPos.y * 0.5 + 0.5 );
gl_FragColor = vec4( color, 1.0 );
}
</script>
<body onload="sceneStart();">
<canvas id="ogl-canvas" style="border: none;" width="200" height="200"></canvas>
</body>
This is possible in a fragment (pixel) shader. Anyway, your original gradient data will work better if it is presented as a texture to the shader together with additional parameters (like linear gradient points, or radial gradient params).
However, you can still pass it as a uniform buffer or texture buffer object. With this you lose the built-in texture sampling and filtering capabilities, which would almost do the gradient for you.
Take a look at Shadertoy (https://www.shadertoy.com) - there are tons of samples that generate patterns inside a shader program without using textures.

OpenGL es 2.0 Gaussian blur on triangle

I recently learned OpenGL ES 2.0, and now I am trying to apply a Gaussian blur to triangles that I generate myself. I have some difficulty understanding the examples on the web, and most of them apply the blur to an image. I know I have to use a framebuffer, but I don't know how to draw a triangle to it and then apply the blur.
Is it possible to see real and complete code in C++ with a good explanation?
EDIT :
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#define GLFW_INCLUDE_ES2
#include <GLFW/glfw3.h>
#include "shaders.hpp"
#include "camera.hpp"
unsigned int vbo, cbo, tbo;
GLuint _fbo, _fbo2, _tex, _tex2;
static const GLuint WIDTH = 800;
static const GLuint HEIGHT = 600;
GLuint pos, col, tex, normal;
camera * _camera = new camera();
static const GLfloat vertices[] = {
0.0f, 1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
-1.0f, -1.0f, 0.0f
};
static const GLfloat colors[] = {
0.0f, 0.5f, 1.0f,
0.5f, 0.5f, 1.0f,
0.5f, 0.5f, 1.0f
};
static const GLfloat texture[] = {
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f
};
int main(void){
GLFWwindow* window;
shaders * shaderBasic;
GLuint pId;
glm::mat4 projection; static glm::mat4 view; static glm::mat4 model;
glfwInit();
glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
window = glfwCreateWindow(WIDTH, HEIGHT, __FILE__, NULL, NULL);
glfwMakeContextCurrent(window);
printf("GL_VERSION : %s\n", glGetString(GL_VERSION) );
printf("GL_RENDERER : %s\n", glGetString(GL_RENDERER) );
std::string vs, fs;
vs = "basic.vs";
fs = "basic.fs";
shaderBasic = new shaders(vs, fs);
shaderBasic->CompileShader();
shaderBasic->LinkShader();
pId = shaderBasic->getProgramId();
pos = glGetAttribLocation(pId, "position");
col = glGetAttribLocation(pId, "colors");
tex = glGetAttribLocation(pId, "tex");
fs = "lastBlur.fs";
shaders * blurShader;
GLuint pIdBlur;
blurShader = new shaders(vs, fs);
blurShader->CompileShader();
blurShader->LinkShader();
pIdBlur = blurShader->getProgramId();
_camera->setPositionCamera(glm::vec3(0, 0, -1));
_camera->setLookAtCamera(glm::vec3(0, 0, 0));
_camera->setFieldOfView(45);
_camera->setAspect(WIDTH, HEIGHT);
_camera->setViewport(WIDTH, HEIGHT);
_camera->getMatricies(projection, view, model);
glGenFramebuffers(1, &_fbo);
glGenTextures(1, &_tex);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
glBindTexture(GL_TEXTURE_2D, _tex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, WIDTH/2, HEIGHT/2, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _tex, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
std::cout << "ERROR::FRAMEBUFFER:: Framebuffer is not complete!" << std::endl;
else{
std::cout << "FRAMEBUFFER COMPLETE" << std::endl;
}
auto sampTex = glGetUniformLocation(pIdBlur, "texture0");
std::cerr << "sampTex : " << sampTex << std::endl;
glUniform1i(sampTex, 0);
while (!glfwWindowShouldClose(window)) {
// glViewport(0, 0, WIDTH, HEIGHT);
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
glClearColor(0.0f, 0.0f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// glViewport(0, 0, WIDTH/2, HEIGHT/2);
glUseProgram(pIdBlur);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(pos, 3, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(pos);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &cbo);
glBindBuffer(GL_ARRAY_BUFFER, cbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(colors), colors, GL_STATIC_DRAW);
glVertexAttribPointer(col, 2, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(col);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &tbo);
glBindBuffer(GL_ARRAY_BUFFER, tbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(texture), texture, GL_STATIC_DRAW);
glVertexAttribPointer(tex, 2, GL_FLOAT, false, 0, 0);
glEnableVertexAttribArray(tex);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pId);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _tex);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwPollEvents();
glfwSwapBuffers(window);
}
glDeleteBuffers(1, &vbo);
glfwTerminate();
return EXIT_SUCCESS;
}
Blur Shader:
#version 100
precision mediump float;
uniform sampler2D texture0;
varying vec3 vColor;
varying vec2 TexCoords;
vec4 blur13(sampler2D image, vec2 uv, vec2 resolution, vec2 direction) {
vec4 color = vec4(0.0);
vec2 off1 = vec2(1.411764705882353) * direction;
vec2 off2 = vec2(3.2941176470588234) * direction;
vec2 off3 = vec2(5.176470588235294) * direction;
color += texture2D(image, uv) * 0.1964825501511404;
color += texture2D(image, uv + (off1 / resolution)) * 0.2969069646728344;
color += texture2D(image, uv - (off1 / resolution)) * 0.2969069646728344;
color += texture2D(image, uv + (off2 / resolution)) * 0.09447039785044732;
color += texture2D(image, uv - (off2 / resolution)) * 0.09447039785044732;
color += texture2D(image, uv + (off3 / resolution)) * 0.010381362401148057;
color += texture2D(image, uv - (off3 / resolution)) * 0.010381362401148057;
return color;
}
void main(){
gl_FragColor = blur13(texture0, TexCoords, vec2(400, 300), vec2(1.0, 0.0));
}
I assume you have swapped pIdBlur and pId.
I'll give you instructions for a Gaussian blur shader with 2 passes. This is an approximation which first blurs along the X axis in the 1st pass and along the Y axis in the 2nd pass. This gives better performance for strong blurring.
The blur shader uses a normal (Gaussian) distribution. The same shader program is used for both passes, with an individual direction setting for each pass, stored in the uniform vec2 u_dir. The strength of the blur effect can be varied with the uniform variable float u_sigma in the range [0.0, 1.0].
Blur Vertex shader
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
Blur Fragment shader
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14157 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
After the program has been linked, the uniform locations and attribute indices can be retrieved:
GLint attrInxPos = glGetAttribLocation( pIdBlur, "inPos" );
GLint locTexture = glGetUniformLocation( pIdBlur, "u_texture" );
GLint locTexSize = glGetUniformLocation( pIdBlur, "u_textureSize" );
GLint locSigma = glGetUniformLocation( pIdBlur, "u_sigma" );
GLint locDir = glGetUniformLocation( pIdBlur, "u_dir" );
A vertex array object containing a quad, which will later be drawn over the whole viewport for a screen-space blur pass, has to be created:
GLuint screenVAO;
glGenVertexArrays( 1, &screenVAO );
glBindVertexArray( screenVAO );
GLuint quadBuf;
glGenBuffers( 1, &quadBuf );
glBindBuffer( GL_ARRAY_BUFFER, quadBuf );
GLfloat screenRect[] = { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f };
glBufferData( GL_ARRAY_BUFFER, 8 * sizeof( float ), screenRect, GL_STATIC_DRAW );
glEnableVertexAttribArray( attrInxPos );
glVertexAttribPointer( attrInxPos, 2, GL_FLOAT, GL_FALSE, 0, nullptr );
2 framebuffers, each with a texture attached to its color plane, have to be created. The scene is drawn into the 1st one. The 2nd one is used by the 1st blur pass. The 2nd blur pass draws directly to the default drawing buffer.
GLuint texObj[2];
GLuint fbObj[2];
glGenTextures(2, texObj);
glGenFramebuffers(2, fbObj);
glActiveTexture(GL_TEXTURE0);
for ( int i = 0; i < 2; i ++ )
{
glBindTexture(GL_TEXTURE_2D, texObj[i]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[i]);
glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texObj[i], 0 );
GLuint renderbuffer;
glGenRenderbuffers(1, &renderbuffer);
glBindRenderbuffer( GL_RENDERBUFFER, renderbuffer );
glRenderbufferStorage( GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, width, height );
glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, renderbuffer );
}
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Now everything that is needed for the blur passes has been generated.
To draw and blur the scene, the following steps have to be applied.
First you have to bind and clear the 1st framebuffer
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[0]);
glClearColor(0.0f, 0.0f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
and use the shader program for drawing the objects:
glUseProgram(pId);
Now draw the object(s) of the scene.
.....
glDrawArrays(GL_TRIANGLES, 0, 3);
The second step is the 1st blur pass. The blur program has to be used and the 2nd framebuffer has to be bound.
After the 1st framebuffer has been released, you can use the texture that is attached to its color plane as an input for the blur shader. Note, a texture can't be source and destination at the same time; this would cause undefined behavior.
To bind the texture to the shader, you have to bind the texture object to a texture unit and assign the index of the texture unit to the uniform sampler of the shader.
int texUnitIndex = 1;
GLfloat texSize[] = { (GLfloat)width, (GLfloat)height };
GLfloat dirX[] = { 1.0f, 0.0f };
GLfloat sigma = .....; // 0.0 <= sigma <= 1.0
glBindFramebuffer(GL_FRAMEBUFFER, fbObj[1]);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pIdBlur);
glActiveTexture(GL_TEXTURE0 + texUnitIndex);
glBindTexture(GL_TEXTURE_2D, texObj[0]);
glUniform1i(locTexture, texUnitIndex);
glUniform2fv(locTexSize, 1, texSize);
glUniform2fv(locDir, 1, dirX);
glUniform1f(locSigma, sigma);
To apply the blur pass, a quad has to be drawn over the viewport area.
glBindVertexArray( screenVAO );
glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );
The 2nd and final blur pass is similar to the 1st blur pass. The target texture of the 1st blur pass becomes the source texture, and the target is the default drawing buffer. The blur direction has to be set to the Y axis of the viewport.
GLfloat dirY[] = { 0.0f, 1.0f };
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texObj[1]);
glUniform2fv(locDir, 1, dirY);
See also the answers to the following questions:
How to get a "Glow" shader effect in OpenGL ES 2.0?
What kind of blurs can be implemented in pixel shaders?
See also a similar WebGL example:
(function loadscene() {
var resize, gl, progDraw, progBlur, vp_size, blurFB;
var canvas, camera, bufCube = {}, bufQuad = {};
var shininess = 10.0, glow = 10.0, sigma = 0.8, radius = 1.0;
function render(deltaMS){
var sliderScale = 100;
sigma = document.getElementById( "sigma" ).value / sliderScale;
radius = document.getElementById( "radius" ).value / sliderScale;
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up framebuffer
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[0] );
gl.viewport( 0, 0, blurFB[0].width, blurFB[0].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// setup view projection and model
var prjMat = camera.Perspective();
var viewMat = camera.LookAt();
var modelMat = RotateAxis( IdentM44(), Fract( deltaMS / 13000.0 ) * 2.0 * Math.PI, 0 );
modelMat = RotateAxis( modelMat, Fract( deltaMS / 17000.0 ) * 2.0 * Math.PI, 1 );
// set up draw shader
ShProg.Use( progDraw );
ShProg.SetM44( progDraw, "u_projectionMat44", prjMat );
ShProg.SetM44( progDraw, "u_modelViewMat44", Multiply(viewMat, modelMat) );
ShProg.SetF1( progDraw, "u_shininess", shininess );
// draw scene
VertexBuffer.Draw( bufCube );
// set blur-X framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[1] );
gl.viewport( 0, 0, blurFB[1].width, blurFB[1].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
var texUnit = 1;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[0].color0_texture );
// set up blur-X shader
ShProg.Use( progBlur );
ShProg.SetI1( progBlur, "u_texture", texUnit )
ShProg.SetF2( progBlur, "u_textureSize", vp_size );
ShProg.SetF1( progBlur, "u_sigma", sigma )
ShProg.SetF1( progBlur, "u_radius", radius )
ShProg.SetF2( progBlur, "u_dir", [1.0, 0.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
// reset framebuffer and bind frambuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texUnit = 2;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[1].color0_texture );
// set up pst process shader
ShProg.SetI1( progBlur, "u_texture", texUnit )
ShProg.SetF1( progBlur, "u_radius", radius )
ShProg.SetF2( progBlur, "u_dir", [0.0, 1.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return null;
progDraw = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.progObj )
return null;
progDraw.inPos = gl.getAttribLocation( progDraw.progObj, "inPos" );
progDraw.inNV = gl.getAttribLocation( progDraw.progObj, "inNV" );
progDraw.inCol = gl.getAttribLocation( progDraw.progObj, "inCol" );
progBlur = ShProg.Create(
[ { source : "post-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "blur-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progBlur.inPos = gl.getAttribLocation( progBlur.progObj, "inPos" );
if ( !progBlur.progObj )
return;
// create cube
var cubePos = [
-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0 ];
var cubeCol = [ 1.0, 0.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 ];
var cubeHlpInx = [ 0, 1, 2, 3, 1, 5, 6, 2, 5, 4, 7, 6, 4, 0, 3, 7, 3, 2, 6, 7, 1, 0, 4, 5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) {
cubePosData.push( cubePos[cubeHlpInx[i]*3], cubePos[cubeHlpInx[i]*3+1], cubePos[cubeHlpInx[i]*3+2] );
}
var cubeNVData = [];
for ( var i1 = 0; i1 < cubeHlpInx.length; i1 += 4 ) {
var nv = [0, 0, 0];
for ( i2 = 0; i2 < 4; ++ i2 ) {
var i = i1 + i2;
nv[0] += cubePosData[i*3]; nv[1] += cubePosData[i*3+1]; nv[2] += cubePosData[i*3+2];
}
for ( i2 = 0; i2 < 4; ++ i2 )
cubeNVData.push( nv[0], nv[1], nv[2] );
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) {
cubeColData.push( cubeCol[is*3], cubeCol[is*3+1], cubeCol[is*3+2] );
}
}
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) {
cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
}
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol } ],
cubeInxData );
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
camera = new Camera( [0, 3, 0.0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // nearest power of 2
blurFB = [];
for ( var i = 0; i < 2; ++ i ) {
fb = gl.createFramebuffer();
fb.width = fbsize;
fb.height = fbsize;
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
blurFB.push( fb );
}
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, intervall ) {
return Fract( deltaTime / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, intervall, range ) {
var pos = self.Fract( deltaTime / (1000*intervall) ) * 2.0
var pos = pos < 1.0 ? pos : (2.0-pos)
return range[0] + (range[1] - range[0]) * pos;
}
function EllipticalPosition( a, b, angRag ) {
var a_b = a * a - b * b
var ea = (a_b <= 0) ? 0 : Math.sqrt( a_b );
var eb = (a_b >= 0) ? 0 : Math.sqrt( -a_b );
return [ a * Math.sin( angRag ) - ea, b * Math.cos( angRag ) - eb, 0 ];
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
};
}
var ShProg = {
Create: function (shaderList) {
var shaderObjs = [];
for (var i_sh = 0; i_sh < shaderList.length; ++i_sh) {
var shderObj = this.Compile(shaderList[i_sh].source, shaderList[i_sh].stage);
if (shderObj) shaderObjs.push(shderObj);
}
var prog = {}
prog.progObj = this.Link(shaderObjs)
if (prog.progObj) {
prog.attrInx = {};
var noOfAttributes = gl.getProgramParameter(prog.progObj, gl.ACTIVE_ATTRIBUTES);
for (var i_n = 0; i_n < noOfAttributes; ++i_n) {
var name = gl.getActiveAttrib(prog.progObj, i_n).name;
prog.attrInx[name] = gl.getAttribLocation(prog.progObj, name);
}
prog.uniLoc = {};
var noOfUniforms = gl.getProgramParameter(prog.progObj, gl.ACTIVE_UNIFORMS);
for (var i_n = 0; i_n < noOfUniforms; ++i_n) {
var name = gl.getActiveUniform(prog.progObj, i_n).name;
prog.uniLoc[name] = gl.getUniformLocation(prog.progObj, name);
}
}
return prog;
},
AttrI: function (prog, name) { return prog.attrInx[name]; },
UniformL: function (prog, name) { return prog.uniLoc[name]; },
Use: function (prog) { gl.useProgram(prog.progObj); },
SetI1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1i(prog.uniLoc[name], val); },
SetF1: function (prog, name, val) { if (prog.uniLoc[name]) gl.uniform1f(prog.uniLoc[name], val); },
SetF2: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform2fv(prog.uniLoc[name], arr); },
SetF3: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform3fv(prog.uniLoc[name], arr); },
SetF4: function (prog, name, arr) { if (prog.uniLoc[name]) gl.uniform4fv(prog.uniLoc[name], arr); },
SetM33: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix3fv(prog.uniLoc[name], false, mat); },
SetM44: function (prog, name, mat) { if (prog.uniLoc[name]) gl.uniformMatrix4fv(prog.uniLoc[name], false, mat); },
Compile: function (source, shaderStage) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader(shaderStage);
gl.shaderSource(shaderObj, source);
gl.compileShader(shaderObj);
var status = gl.getShaderParameter(shaderObj, gl.COMPILE_STATUS);
if (!status) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
},
Link: function (shaderObjs) {
var prog = gl.createProgram();
for (var i_sh = 0; i_sh < shaderObjs.length; ++i_sh)
gl.attachShader(prog, shaderObjs[i_sh]);
gl.linkProgram(prog);
status = gl.getProgramParameter(prog, gl.LINK_STATUS);
if ( !status ) alert(gl.getProgramInfoLog(prog));
return status ? prog : null;
} };
var VertexBuffer = {
Create: function(attribs, indices, type) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length, primitive_type: type ? type : gl.TRIANGLES };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc, no_of: attribs[i].data.length/attribs[i].attrSize });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ARRAY_BUFFER, null);
if ( buffer.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
}
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
if ( bufObj.inxLen > 0 ) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitive_type, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
}
else
gl.drawArrays(bufObj.primitive_type, 0, bufObj.attr[0].no_of );
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform mat4 u_projectionMat44;
uniform mat4 u_modelViewMat44;
void main()
{
vertNV = mat3( u_modelViewMat44 ) * normalize( inNV );
vertCol = inCol;
vec4 pos = u_modelViewMat44 * vec4( inPos, 1.0 );
vertPos = pos.xyz / pos.w;
gl_Position = u_projectionMat44 * pos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform float u_shininess;
void main()
{
vec3 color = vertCol;
vec3 normalV = normalize( vertNV );
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + normalV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float shineFac = ( u_shininess + 2.0 ) * pow( NdotH, u_shininess ) / ( 2.0 * 3.14159265 );
gl_FragColor = vec4( color.rgb * (0.2 + NdotH), 1.0 );
}
</script>
<script id="post-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="blur-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform float u_radius;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14157 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
</script>
<div>
<form id="gui" name="inputs">
<table>
<tr> <td> <font color= #CCF>radius</font> </td>
<td> <input type="range" id="radius" min="1" max="1000" value="200"/></td> </tr>
<tr> <td> <font color= #CCF>blur</font> </td>
<td> <input type="range" id="sigma" min="1" max="50" value="10"/></td> </tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;"></canvas>
In general you need to draw the scene you want to blur to a frame buffer object (FBO) with attached texture.
Create a frame buffer
Create an empty texture (data parameter should be null)
Bind frame buffer and texture
Attach the texture to frame buffer as color
At this point the rest of the drawing should be exactly the same as on your main buffer, but make sure you set up a correct viewport. This procedure makes you draw to the texture.
Now that you have a texture with your scene, you go through the same procedure as for blurring an image.
Bind your main buffer (usually indexed 0)
Bind texture
Draw the texture to the main buffer with the blur shader
You can then optimize it with horizontal and vertical blur shaders, making 2 passes and using yet another FBO (see the sketch below).
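Mapped onto the code from the question, a minimal sketch of that per-frame structure could look like the following. It assumes the _fbo/_tex, pId and pIdBlur from the question; the attribute setup is omitted, and the screen-filling quad in the second pass is an assumption (the question currently re-draws the triangle there):
// Pass 1: draw the scene (the triangle) into the FBO texture, not blurred yet
glBindFramebuffer(GL_FRAMEBUFFER, _fbo);
glViewport(0, 0, WIDTH / 2, HEIGHT / 2);   // must match the size of _tex
glClearColor(0.0f, 0.0f, 0.4f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pId);                         // plain scene shader
// ... bind the triangle's vertex attributes ...
glDrawArrays(GL_TRIANGLES, 0, 3);
// Pass 2: draw a screen-filling quad to the default framebuffer,
// sampling the FBO texture with the blur shader
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport(0, 0, WIDTH, HEIGHT);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(pIdBlur);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _tex);
glUniform1i(glGetUniformLocation(pIdBlur, "texture0"), 0);
// ... bind the quad's positions and texture coordinates ...
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);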
So I would try doing some steps in your application:
Create a scene which draws and shows a triangle
Create a FBO, draw to it and draw the FBO texture on your main buffer
Create a scene which draws and shows a blurred image through texture
Create an FBO to which the scene is drawn, then draw a blurred texture from the FBO on the main buffer
If you find yourself in trouble at any of these points you might want to ask a specific question about it.

Directx11: remove alpha blending for specific sprite

Background:
- I've been spending the last 6 weeks trying to learn Directx 11 programming.
I'm currently rendering dds images on-screen following this book
http://www.amazon.co.uk/Beginning-Directx-11-Game-Programming/dp/1435458958/ref=sr_1_1?ie=UTF8&qid=1346510304&sr=8-1
I am trying to draw 2D sprites upon a 2D background which in itself is also a 2D sprite. The problem is that these two lines are needed
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendState_ );
d3dContext_->OMSetBlendState( alphaBlendState_, blendFactor, 0xFFFFFFFF );
to enable alpha transparency in the dds images. I'd like to have the option to blend sprites, but right now every sprite is blended with my blue background, thus resulting in blue logos, icons etc.
How do I make the alpha blending ignore a specific image in DirectX 11?
Since the background sprite is the first to be drawn, I can't activate the blending later in the code when rendering the other sprites without every sprite turning blue.
Thanks in advance :)
EDIT:
I've been working on this issue for the past 3 weeks and still have no solution. Every time the blend state is enabled in the DeviceContext, it seems to apply to every texture drawn on the screen, no matter the order of rendering.
Here is the source code of the header file:
class GameSpriteDemo : public Dx11DemoBase
{
public:
GameSpriteDemo( );
virtual ~GameSpriteDemo( );
bool LoadContent( );
void UnloadContent( );
void Update( float dt );
void Render( );
void DisableBlending();
ID3D11ShaderResourceView* getColorMap(int);
int UniqSpriteAmount();
// text rendering system
private:
bool DrawString( char* message, float startX, float startY );
private:
ID3D11VertexShader* solidColorVS_;
ID3D11VertexShader* solidColorVS_2;
ID3DX11Effect* effect_;
vector<ID3D11VertexShader*> solidColorVShaders;
ID3D11PixelShader* solidColorPS_;
ID3D11PixelShader* solidColorPS_2;
ID3D11InputLayout* inputLayout_;
ID3D11InputLayout* inputLayout2;
ID3D11Buffer* vertexBuffer_;
vector<ID3D11Buffer*> vertexBuffers;
ID3D11ShaderResourceView* colorMap_;
vector<ID3D11ShaderResourceView*> colorMaps;
ID3D11SamplerState* colorMapSampler_;
vector<ID3D11SamplerState*> colorSamplers;
ID3D11BlendState* alphaBlendState_;
ID3D11BlendState* alphaBlendStateOff;
vector<LPCTSTR> elements2D;
vector<GameSprite> sprites_;
ID3D11Buffer* mvpCB_;
XMMATRIX vpMatrix_;
};
#endif
Here is the source code of the cpp file (the base class Dx11DemoBase is not needed, since it only sets up the SwapChain and D3DContext):
bool GameSpriteDemo::LoadContent( )
{
// add sprite textures to the engine, giving them index 0, 1, 2... etc.
elements2D.push_back("placeholder3.dds");
elements2D.push_back("cav.dds");
elements2D.push_back("Stats.dds");
// create sprites
XMFLOAT2 sprite1Pos( 0.0f, 500.0f);
GameSprite sprite1(getDevice(), "placeholder3.dds", 0, sprite1Pos, 0.0f);
XMFLOAT2 sprite1Scale( 0.5f, 0.5f );
XMFLOAT2 mini(0.8f, 0.6f);
sprite1.SetScale(mini);
sprites_.push_back(sprite1);
XMFLOAT2 sprite2Pos( 200.0f, 100.0f );
GameSprite sprite2(getDevice(), "Stats.dds", 2, sprite2Pos, 2.0f);
// sprites_.push_back(sprite2);
XMFLOAT2 sprite3Pos( 290.0f, 300.0f );
GameSprite sprite3(getDevice(), "cav.dds", 1, sprite3Pos, 2.0f);
sprite3.SetScale(sprite1Scale);
sprites_.push_back(sprite3);
// create vertex buffers, colorMaps and colorSamplers
for (int i = 0; i <= elements2D.size(); i++) {
solidColorVShaders.push_back(solidColorVS_);
colorMaps.push_back(colorMap_);
colorSamplers.push_back(colorMapSampler_);
vertexBuffers.push_back(vertexBuffer_);
}
// continue
ID3DBlob* vsBuffer = 0;
bool compileResult = CompileD3DShader( "TextureMap.fx", "VS_Main", "vs_4_0", &vsBuffer );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling the vertex shader!" );
return false;
}
HRESULT d3dResult;
d3dResult = d3dDevice_->CreateVertexShader( vsBuffer->GetBufferPointer( ),
vsBuffer->GetBufferSize( ), 0, &solidColorVShaders[0] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating the vertex shader!" );
if( vsBuffer )
vsBuffer->Release( );
return false;
}
D3D11_INPUT_ELEMENT_DESC solidColorLayout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 }
};
unsigned int totalLayoutElements = ARRAYSIZE( solidColorLayout );
d3dResult = d3dDevice_->CreateInputLayout( solidColorLayout, totalLayoutElements,
vsBuffer->GetBufferPointer( ), vsBuffer->GetBufferSize( ), &inputLayout_ );
vsBuffer->Release( );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating the input layout!" );
return false;
}
ID3DBlob* psBuffer = 0;
compileResult = CompileD3DShader( "TextureMap.fx", "PS_Main", "ps_4_0", &psBuffer );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling pixel shader!" );
return false;
}
d3dResult = d3dDevice_->CreatePixelShader( psBuffer->GetBufferPointer( ),
psBuffer->GetBufferSize( ), 0, &solidColorPS_ );
psBuffer->Release( );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Error creating pixel shader!" );
return false;
}
// create ColorMap, ColorSampler and VertexBuffer for each unique sprite
D3D11_SAMPLER_DESC colorMapDesc;
D3D11_BUFFER_DESC vertexDesc;
D3D11_SUBRESOURCE_DATA resourceData;
for (int j = 0; j < elements2D.size(); j++) {
d3dResult = D3DX11CreateShaderResourceViewFromFile( d3dDevice_,
elements2D[j], 0, 0, &colorMaps[j], 0 );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to load the texture image!" );
return false;
}
ZeroMemory( &colorMapDesc, sizeof( colorMapDesc ) );
colorMapDesc.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc.ComparisonFunc = D3D11_COMPARISON_NEVER;
colorMapDesc.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
colorMapDesc.MaxLOD = D3D11_FLOAT32_MAX;
d3dResult = d3dDevice_->CreateSamplerState( &colorMapDesc, &colorSamplers[j] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create color map sampler state!" );
return false;
}
float Width = sprites_[j].getWidth();
float Height = sprites_[j].getHeight();
VertexPos vertices[] =
{
{ XMFLOAT3( Width, Height, 1.0f ), XMFLOAT2( 1.0f, 0.0f ) },
{ XMFLOAT3( Width, -Height, 1.0f ), XMFLOAT2( 1.0f, 1.0f ) },
{ XMFLOAT3( -Width, -Height, 1.0f ), XMFLOAT2( 0.0f, 1.0f ) },
{ XMFLOAT3( -Width, -Height, 1.0f ), XMFLOAT2( 0.0f, 1.0f ) },
{ XMFLOAT3( -Width, Height, 1.0f ), XMFLOAT2( 0.0f, 0.0f ) },
{ XMFLOAT3( Width, Height, 1.0f ), XMFLOAT2( 1.0f, 0.0f ) },
};
ZeroMemory( &vertexDesc, sizeof( vertexDesc ) );
vertexDesc.Usage = D3D11_USAGE_DEFAULT;
vertexDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
vertexDesc.ByteWidth = sizeof( VertexPos ) * 6;
ZeroMemory( &resourceData, sizeof( resourceData ) );
resourceData.pSysMem = vertices;
d3dResult = d3dDevice_->CreateBuffer( &vertexDesc, &resourceData, &vertexBuffers[j] );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create vertex buffer!" );
return false;
}
}
// end of for loop
// create text fonts
d3dResult = D3DX11CreateShaderResourceViewFromFile( d3dDevice_,
"font.dds", 0, 0, &colorMap_, 0 );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to load the texture image!" );
return false;
}
D3D11_SAMPLER_DESC colorMapDesc2;
ZeroMemory( &colorMapDesc2, sizeof( colorMapDesc2 ) );
colorMapDesc2.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
colorMapDesc2.ComparisonFunc = D3D11_COMPARISON_NEVER;
colorMapDesc2.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
colorMapDesc2.MaxLOD = D3D11_FLOAT32_MAX;
d3dResult = d3dDevice_->CreateSamplerState( &colorMapDesc2, &colorMapSampler_ );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create color map sampler state!" );
return false;
}
D3D11_BUFFER_DESC vertexDesc2;
ZeroMemory( &vertexDesc2, sizeof( vertexDesc2 ) );
vertexDesc2.Usage = D3D11_USAGE_DYNAMIC;
vertexDesc2.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
vertexDesc2.BindFlags = D3D11_BIND_VERTEX_BUFFER;
const int sizeOfSprite = sizeof( VertexPos ) * 6;
const int maxLetters = 24;
vertexDesc2.ByteWidth = sizeOfSprite * maxLetters;
d3dResult = d3dDevice_->CreateBuffer( &vertexDesc2, 0, &vertexBuffer_ );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to create vertex buffer!" );
return false;
}
// end of font creation
D3D11_BUFFER_DESC constDesc;
ZeroMemory( &constDesc, sizeof( constDesc ) );
constDesc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
constDesc.ByteWidth = sizeof( XMMATRIX );
constDesc.Usage = D3D11_USAGE_DEFAULT;
d3dResult = d3dDevice_->CreateBuffer( &constDesc, 0, &mvpCB_ );
if( FAILED( d3dResult ) )
{
return false;
}
XMMATRIX view = XMMatrixIdentity( );
XMMATRIX projection = XMMatrixOrthographicOffCenterLH( 0.0f, 1366.0f, 0.0f, 768.0f, 0.1f, 100.0f );
vpMatrix_ = XMMatrixMultiply( view, projection );
D3D11_BLEND_DESC blendDesc;
ZeroMemory( &blendDesc, sizeof( blendDesc ) );
blendDesc.RenderTarget[0].BlendEnable = TRUE;
blendDesc.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
blendDesc.RenderTarget[0].DestBlend = D3D11_BLEND_ONE;
blendDesc.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
blendDesc.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_ZERO;
blendDesc.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_ZERO;
blendDesc.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
float blendFactor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendState_ );
// create the disabled blend state
blendDesc.RenderTarget[0].BlendEnable = FALSE;
d3dDevice_->CreateBlendState( &blendDesc, &alphaBlendStateOff );
d3dContext_->OMSetBlendState( alphaBlendState_, blendFactor, 0xFFFFFFFF );
// create a second shader set for the text rendering
ID3DBlob* vsBuffer2 = 0;
compileResult = CompileD3DShader( "TextMap.fx", "VS_Main", "vs_4_0", &vsBuffer2 );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling the vertex shader!" );
return false;
}
d3dResult = d3dDevice_->CreateVertexShader( vsBuffer2->GetBufferPointer( ),
vsBuffer2->GetBufferSize( ), 0, &solidColorVS_2 );
ID3DBlob* psBuffer2 = 0;
compileResult = CompileD3DShader( "TextureMap.fx", "PS_Main", "ps_4_0", &psBuffer2 );
if( compileResult == false )
{
DXTRACE_MSG( "Error compiling pixel shader!" );
return false;
}
d3dResult = d3dDevice_->CreatePixelShader( psBuffer2->GetBufferPointer( ),
psBuffer2->GetBufferSize( ), 0, &solidColorPS_2 );
psBuffer2->Release( );
return true;
}
void GameSpriteDemo::UnloadContent( )
{
if( colorMapSampler_ ) colorMapSampler_->Release( );
if( colorMap_ ) colorMap_->Release( );
if( solidColorVShaders[0] ) solidColorVShaders[0]->Release( );
if( solidColorPS_ ) solidColorPS_->Release( );
if( inputLayout_ ) inputLayout_->Release( );
if( vertexBuffer_ ) vertexBuffer_->Release( );
if( mvpCB_ ) mvpCB_->Release( );
if( alphaBlendState_ ) alphaBlendState_->Release( );
if( alphaBlendStateOff ) alphaBlendStateOff->Release( );
colorMapSampler_ = 0;
colorMap_ = 0;
solidColorVShaders[0] = 0;
solidColorPS_ = 0;
inputLayout_ = 0;
vertexBuffer_ = 0;
mvpCB_ = 0;
alphaBlendState_ = 0;
alphaBlendStateOff = 0;
}
void GameSpriteDemo::Update( float dt )
{
// Nothing to update
}
bool GameSpriteDemo::DrawString( char* message, float startX, float startY )
{
// Size in bytes for a single sprite.
const int sizeOfSprite = sizeof( VertexPos ) * 6;
// Demo's dynamic buffer setup for max of 24 letters.
const int maxLetters = 24;
int length = strlen( message );
// Clamp for strings too long.
if( length > maxLetters )
length = maxLetters;
// Char's width on screen.
float charWidth = 32.0f / (1366.0f*1.0f);
// Char's height on screen.
float charHeight = 32.0f / (768.0f*1.0f);
// Char's texel width.
float texelWidth = 32.0f / 896.0f;
// verts per-triangle (3) * total triangles (2) = 6.
const int verticesPerLetter = 6;
D3D11_MAPPED_SUBRESOURCE mapResource;
HRESULT d3dResult = d3dContext_->Map( vertexBuffer_, 0, D3D11_MAP_WRITE_DISCARD, 0, &mapResource );
if( FAILED( d3dResult ) )
{
DXTRACE_MSG( "Failed to map resource!" );
return false;
}
// Point to our vertex buffer's internal data.
VertexPos *spritePtr = ( VertexPos* )mapResource.pData;
const int indexA = static_cast<char>( 'A' );
const int indexZ = static_cast<char>( 'Z' );
const int index_a = static_cast<char>( 'a' );
for( int i = 0; i < length; ++i )
{
float thisStartX = startX + ( charWidth * static_cast<float>( i ) );
float thisEndX = thisStartX + charWidth;
float thisEndY = startY + charHeight;
spritePtr[0].pos = XMFLOAT3( thisEndX, thisEndY, 1.0f );
spritePtr[1].pos = XMFLOAT3( thisEndX, startY, 1.0f );
spritePtr[2].pos = XMFLOAT3( thisStartX, startY, 1.0f );
spritePtr[3].pos = XMFLOAT3( thisStartX, startY, 1.0f );
spritePtr[4].pos = XMFLOAT3( thisStartX, thisEndY, 1.0f );
spritePtr[5].pos = XMFLOAT3( thisEndX, thisEndY, 1.0f );
int texLookup = 0;
int letter = static_cast<char>( message[i] );
if (letter == index_a) {
texLookup = (indexZ - indexA) + 2;
}
else if( letter < indexA || letter > indexZ )
{
// Grab one index past Z, which is a blank space in the texture.
texLookup = ( indexZ - indexA ) + 1;
}
else
{
// A = 0, B = 1, Z = 25, etc.
texLookup = ( letter - indexA );
}
float tuStart = 0.0f + ( texelWidth * static_cast<float>( texLookup ) );
float tuEnd = tuStart + texelWidth;
spritePtr[0].tex0 = XMFLOAT2( tuEnd, 0.0f );
spritePtr[1].tex0 = XMFLOAT2( tuEnd, 1.0f );
spritePtr[2].tex0 = XMFLOAT2( tuStart, 1.0f );
spritePtr[3].tex0 = XMFLOAT2( tuStart, 1.0f );
spritePtr[4].tex0 = XMFLOAT2( tuStart, 0.0f );
spritePtr[5].tex0 = XMFLOAT2( tuEnd, 0.0f );
spritePtr += 6;
}
d3dContext_->Unmap( vertexBuffer_, 0 );
d3dContext_->Draw( 6 * length, 0 );
return true;
}
void GameSpriteDemo::Render( )
{
if( d3dContext_ == 0 )
return;
float clearColor[4] = { 0.0f, 0.0f, 0.0f, 1.0f };
d3dContext_->ClearRenderTargetView( backBufferTarget_, clearColor );
unsigned int stride = sizeof( VertexPos );
unsigned int offset = 0;
d3dContext_->IASetInputLayout( inputLayout_ );
d3dContext_->IASetPrimitiveTopology( D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST );
d3dContext_->VSSetShader( solidColorVShaders[0], 0, 0 );
d3dContext_->PSSetShader( solidColorPS_, 0, 0 );
XMMATRIX world;
XMMATRIX mvp;
int index;
for (int k = 0; k < sprites_.size(); k++) {
index = sprites_[k].getIndex();
d3dContext_->IASetVertexBuffers( 0, 1, &vertexBuffers[index], &stride, &offset );
d3dContext_->PSSetShaderResources( 0, 1, &colorMaps[index] );
d3dContext_->PSSetSamplers( 0, 1, &colorSamplers[index] );
world = sprites_[k].GetWorldMatrix();
mvp = XMMatrixMultiply( world, vpMatrix_ );
mvp = XMMatrixTranspose( mvp );
d3dContext_->UpdateSubresource( mvpCB_, 0, 0, &mvp, 0, 0 );
d3dContext_->VSSetConstantBuffers( 0, 1, &mvpCB_ );
d3dContext_->Draw( 6, 0 );
}
// render text
DisableBlending();
d3dContext_->IASetVertexBuffers( 0, 1, &vertexBuffer_, &stride, &offset );
d3dContext_->PSSetShaderResources( 0, 1, &colorMap_ );
d3dContext_->PSSetSamplers( 0, 1, &colorMapSampler_ );
d3dContext_->VSSetShader( solidColorVS_2, 0, 0 );
d3dContext_->PSSetShader( solidColorPS_2, 0, 0 );
DrawString( "PROTOTYPE TEXT", -0.2f, 0.0f );
swapChain_->Present( 0, 0 );
}
ID3D11ShaderResourceView* GameSpriteDemo::getColorMap(int index) {
return colorMaps[index];
}
// this function disables alpha blending
void GameSpriteDemo::DisableBlending() {
float blendFactor[4] = { 0.0f, 0.0f, 0.0f, 0.0f };
d3dContext_->OMSetBlendState( alphaBlendStateOff, blendFactor, 0xFFFFFFFF );
}
Screenshot of the results when drawing with blending enabled:
http://i49.tinypic.com/33kao9v.jpg
Here is the HLSL code for the two FX files I am using (the first for the sprites, the second for the text):
TextureMap.fx
cbuffer cbChangesPerFrame : register( b0 )
{
matrix mvp_;
};
Texture2D colorMap_ : register( t0 );
SamplerState colorSampler_ : register( s0 );
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
};
struct PS_Input
{
float4 pos : SV_POSITION;
float2 tex0 : TEXCOORD0;
};
PS_Input VS_Main( VS_Input vertex )
{
PS_Input vsOut = ( PS_Input )0;
vsOut.pos = mul( vertex.pos, mvp_ );
vsOut.tex0 = vertex.tex0;
return vsOut;
}
float4 PS_Main( PS_Input frag ) : SV_TARGET
{
return colorMap_.Sample( colorSampler_, frag.tex0 );
}
TextMap.fx
Texture2D colorMap_ : register( t0 );
SamplerState colorSampler_ : register( s0 );
struct VS_Input
{
float4 pos : POSITION;
float2 tex0 : TEXCOORD0;
};
struct PS_Input
{
float4 pos : SV_POSITION;
float2 tex0 : TEXCOORD0;
};
PS_Input VS_Main( VS_Input vertex )
{
PS_Input vsOut = ( PS_Input )0;
vsOut.pos = vertex.pos;
vsOut.tex0 = vertex.tex0;
return vsOut;
}
float4 PS_Main( PS_Input frag ) : SV_TARGET
{
return colorMap_.Sample( colorSampler_, frag.tex0 );
}
I have also wondered whether pixel shaders could be used to perform the alpha blending for specific 2D images, but my first attempts have not worked at all.
I hope someone can find a solution somehow :)
Thanks again for the help.
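For what it is worth, here is a minimal sketch of what blending in the pixel shader can look like. It is written in GLSL for illustration (the same idea maps directly to an HLSL pixel shader), the texture names are hypothetical, and it assumes the already-rendered background is available as a second texture, which normally means drawing the scene to an intermediate render target first:
#version 120
uniform sampler2D u_sprite; // sprite with an alpha channel (hypothetical name)
uniform sampler2D u_background; // copy of what has been rendered so far (hypothetical name)
void main(void)
{
vec2 uv = gl_TexCoord[0].st;
vec4 src = texture2D(u_sprite, uv);
vec4 dst = texture2D(u_background, uv);
// classic "over" blending: src.rgb * src.a + dst.rgb * (1.0 - src.a)
gl_FragColor = vec4(mix(dst.rgb, src.rgb, src.a), 1.0);
}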

What kind of blurs can be implemented in pixel shaders?

Gaussian, box, radial, directional, motion blur, zoom blur, etc.
I read that Gaussian blur can be broken down into passes that could be implemented in pixel shaders, but I couldn't find any samples.
Is it right to assume that any effect that depends on pixels other than the current one cannot be implemented in a pixel shader?
You can implement all of them, as long as you are able to pass the required information to the shader.
The trick, in these cases, is to perform multiple render passes. The final shader takes a certain number of samplers, which are the non-blurred sources used to compute the blurred values.
For example, using multiple textures it is possible to emulate effects based on the accumulation buffer.
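As a minimal sketch of that accumulation idea (the uniform names here are hypothetical, not part of the original answer), a fragment shader can simply mix the freshly rendered frame with the previously accumulated result:
#version 120
uniform sampler2D u_currentFrame; // scene rendered this frame (hypothetical name)
uniform sampler2D u_accumulated; // accumulated result of the previous frames (hypothetical name)
uniform float u_feedback; // e.g. 0.9 keeps 90% of the history (motion-blur style trail)
void main(void)
{
vec2 uv = gl_TexCoord[0].st;
vec4 current = texture2D(u_currentFrame, uv);
vec4 history = texture2D(u_accumulated, uv);
gl_FragColor = mix(current, history, u_feedback);
}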
To implement a Gaussian blur, render the scene onto a framebuffer object with a texture attached to its color attachment. This is the first pass.
As a second pass, render a textured quad, where the texture is the one generated in the first step. Texture coordinates are passed from the vertex stage to the fragment stage and interpolated across the quad, so you have a texture coordinate for each fragment; apply an offset to each coordinate to fetch the texels around the underlying one, and perform the Gaussian blur.
General 'pipeline' for post-processing effects:
setRenderTarget(myRenderTarget); // or FBO in GL
drawAwesomeScene();
setDefaultRenderTarget(); // draw to screen...
blurShader.use();
// the shader needs to know the size of one pixel on the screen
blurShader.uniform2f("texelSize", 1/screenW, 1/screenH);
// set the texture with scene rendered...
setRenderTargetTexture();
drawFullScreenQuad();
// other effects...
A useful example/tutorial for blur: http://www.gamerendering.com/2008/10/11/gaussian-blur-filter-shader/
I implemented a generic convolution fragment shader (pixel shader)
#version 120
uniform sampler2D texUnit;
uniform float[9] conMatrix;
uniform float conWeight;
uniform vec2 conPixel;
void main(void)
{
vec4 color = vec4(0.0);
vec2 texCoord = gl_TexCoord[0].st;
vec2 offset = conPixel * 1.5;
vec2 start = texCoord - offset;
vec2 current = start;
for (int i = 0; i < 9; i++)
{
color += texture2D( texUnit, current ) * conMatrix[i];
current.x += conPixel.x;
if (i == 2 || i == 5) {
current.x = start.x;
current.y += conPixel.y;
}
}
gl_FragColor = color * conWeight;
}
For a Blur:
where conPixel is {1/screen width, 1/screen height}
where conMatrix is {1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0}
where conWeight is 1.0 / 9.0
where texUnit is 0
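As a variation (an assumption on my part, not part of the original answer), the same shader can approximate a 3x3 Gaussian blur simply by weighting the centre samples more strongly:
where conMatrix is {1.0, 2.0, 1.0, 2.0, 4.0, 2.0, 1.0, 2.0, 1.0}
where conWeight is 1.0 / 16.0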
A commonly seen way to blur a scene is the Gaussian blur, implemented as a two-pass post-process.
This is an approximation which first blurs along the X axis in the 1st pass and along the Y axis in the 2nd pass (or vice versa). Since a 2D Gaussian kernel is separable into two 1D kernels, two passes of N samples each replace a single pass of N*N samples, which gives much better performance for strong blurring.
The blur shader uses a normal (Gaussian) distribution. The same shader program is used for both passes, with an individual direction setting for each pass stored in the uniform vec2 u_dir. The strength of the blur effect can be varied with the uniform variable float u_sigma in the range [0.0, 1.0].
The scene is rendered to a framebuffer with a texture bound to its color plane. A screen-space pass uses this texture as input and blurs the output along the X axis. The X-axis blur pass writes to another framebuffer, again with a texture bound to its color plane. That texture is then the input for the final blur pass along the Y axis.
A detailed description of the blur algorithm can be found in the answers to the question OpenGL es 2.0 Gaussian blur on triangle or at LearnOpenGL - Gaussian blur.
Blur Vertex shader
#version 330
in vec2 inPos;
out vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
Blur Fragment shader
#version 330
in vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14159 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
See also the answers to the following question:
Fast Gaussian blur at pause
OpenGL es 2.0 Gaussian blur on triangle
How to get a "Glow" shader effect in OpenGL ES 2.0?
See a WebGL example:
var readInput = true;
function changeEventHandler(event){
readInput = true;
}
(function loadscene() {
var resize, gl, progDraw, progBlur, vp_size, blurFB;
var canvas;
var camera;
var bufCube = {};
var bufQuad = {};
var shininess = 10.0;
var glow = 10.0;
var sigma = 0.8;
var radius = 1.0;
function render(deltaMS){
if ( readInput ) {
//readInput = false;
var sliderScale = 100;
sigma = document.getElementById( "sigma" ).value / sliderScale;
radius = document.getElementById( "radius" ).value / sliderScale;
}
vp_size = [canvas.width, canvas.height];
camera.Update( vp_size );
gl.enable( gl.DEPTH_TEST );
gl.clearColor( 0.0, 0.0, 0.0, 1.0 );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// set up framebuffer
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[0] );
gl.viewport( 0, 0, blurFB[0].width, blurFB[0].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
// setup view projection and model
var prjMat = camera.Perspective();
var viewMat = camera.Orbit();
var modelMat = IdentM44();
modelMat = camera.AutoModelMatrix();
// set up draw shader
ShProg.Use( progDraw.prog );
ShProg.SetM44( progDraw.prog, "u_projectionMat44", prjMat );
ShProg.SetM44( progDraw.prog, "u_modelViewMat44", Multiply(viewMat, modelMat) );
ShProg.SetF1( progDraw.prog, "u_shininess", shininess );
// draw scene
VertexBuffer.Draw( bufCube );
// set blur-X framebuffer and bind framebuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, blurFB[1] );
gl.viewport( 0, 0, blurFB[1].width, blurFB[1].height );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
var texUnit = 1;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[0].color0_texture );
// set up blur-X shader
ShProg.Use( progBlur.prog );
ShProg.SetI1( progBlur.prog, "u_texture", texUnit )
ShProg.SetF2( progBlur.prog, "u_textureSize", vp_size );
ShProg.SetF1( progBlur.prog, "u_sigma", sigma )
ShProg.SetF1( progBlur.prog, "u_radius", radius )
ShProg.SetF2( progBlur.prog, "u_dir", [1.0, 0.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
// reset framebuffer and bind framebuffer texture
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
gl.viewport( 0, 0, vp_size[0], vp_size[1] );
gl.clear( gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT );
texUnit = 2;
gl.activeTexture( gl.TEXTURE0 + texUnit );
gl.bindTexture( gl.TEXTURE_2D, blurFB[1].color0_texture );
// set up post-process shader
ShProg.SetI1( progBlur.prog, "u_texture", texUnit )
ShProg.SetF1( progBlur.prog, "u_radius", radius )
ShProg.SetF2( progBlur.prog, "u_dir", [0.0, 1.0] )
// draw full screen space
gl.enableVertexAttribArray( progBlur.inPos );
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.vertexAttribPointer( progBlur.inPos, 2, gl.FLOAT, false, 0, 0 );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.drawElements( gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0 );
gl.disableVertexAttribArray( progBlur.inPos );
requestAnimationFrame(render);
}
function initScene() {
canvas = document.getElementById( "canvas");
gl = canvas.getContext( "experimental-webgl" );
if ( !gl )
return null;
progDraw = {}
progDraw.prog = ShProg.Create(
[ { source : "draw-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "draw-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
if ( !progDraw.prog )
return null;
progDraw.inPos = gl.getAttribLocation( progDraw.prog, "inPos" );
progDraw.inNV = gl.getAttribLocation( progDraw.prog, "inNV" );
progDraw.inCol = gl.getAttribLocation( progDraw.prog, "inCol" );
progBlur = {}
progBlur.prog = ShProg.Create(
[ { source : "post-shader-vs", stage : gl.VERTEX_SHADER },
{ source : "blur-shader-fs", stage : gl.FRAGMENT_SHADER }
] );
progBlur.inPos = gl.getAttribLocation( progBlur.prog, "inPos" );
if ( !progBlur.prog )
return;
// create cube
var cubePos = [
-1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 1.0, 1.0, 1.0, -1.0, 1.0, 1.0,
-1.0, -1.0, -1.0, 1.0, -1.0, -1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0 ];
var cubeCol = [ 1.0, 0.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0 ];
var cubeHlpInx = [ 0, 1, 2, 3, 1, 5, 6, 2, 5, 4, 7, 6, 4, 0, 3, 7, 3, 2, 6, 7, 1, 0, 4, 5 ];
var cubePosData = [];
for ( var i = 0; i < cubeHlpInx.length; ++ i ) {
cubePosData.push( cubePos[cubeHlpInx[i]*3], cubePos[cubeHlpInx[i]*3+1], cubePos[cubeHlpInx[i]*3+2] );
}
var cubeNVData = [];
for ( var i1 = 0; i1 < cubeHlpInx.length; i1 += 4 ) {
var nv = [0, 0, 0];
for ( i2 = 0; i2 < 4; ++ i2 ) {
var i = i1 + i2;
nv[0] += cubePosData[i*3]; nv[1] += cubePosData[i*3+1]; nv[2] += cubePosData[i*3+2];
}
for ( i2 = 0; i2 < 4; ++ i2 )
cubeNVData.push( nv[0], nv[1], nv[2] );
}
var cubeColData = [];
for ( var is = 0; is < 6; ++ is ) {
for ( var ip = 0; ip < 4; ++ ip ) {
cubeColData.push( cubeCol[is*3], cubeCol[is*3+1], cubeCol[is*3+2] );
}
}
var cubeInxData = [];
for ( var i = 0; i < cubeHlpInx.length; i += 4 ) {
cubeInxData.push( i, i+1, i+2, i, i+2, i+3 );
}
bufCube = VertexBuffer.Create(
[ { data : cubePosData, attrSize : 3, attrLoc : progDraw.inPos },
{ data : cubeNVData, attrSize : 3, attrLoc : progDraw.inNV },
{ data : cubeColData, attrSize : 3, attrLoc : progDraw.inCol } ],
cubeInxData );
bufQuad.pos = gl.createBuffer();
gl.bindBuffer( gl.ARRAY_BUFFER, bufQuad.pos );
gl.bufferData( gl.ARRAY_BUFFER, new Float32Array( [ -1.0, -1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0 ] ), gl.STATIC_DRAW );
bufQuad.inx = gl.createBuffer();
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, bufQuad.inx );
gl.bufferData( gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( [ 0, 1, 2, 0, 2, 3 ] ), gl.STATIC_DRAW );
camera = new Camera( [0, 3, 0.0], [0, 0, 0], [0, 0, 1], 90, vp_size, 0.5, 100 );
window.onresize = resize;
resize();
requestAnimationFrame(render);
}
function resize() {
//vp_size = [gl.drawingBufferWidth, gl.drawingBufferHeight];
vp_size = [window.innerWidth, window.innerHeight]
//vp_size = [256, 256]
canvas.width = vp_size[0];
canvas.height = vp_size[1];
var fbsize = Math.max(vp_size[0], vp_size[1]);
fbsize = 1 << 31 - Math.clz32(fbsize); // largest power of 2 not exceeding fbsize
blurFB = [];
for ( var i = 0; i < 2; ++ i ) {
fb = gl.createFramebuffer();
fb.width = fbsize;
fb.height = fbsize;
gl.bindFramebuffer( gl.FRAMEBUFFER, fb );
fb.color0_texture = gl.createTexture();
gl.bindTexture( gl.TEXTURE_2D, fb.color0_texture );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST );
gl.texParameteri( gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST );
gl.texImage2D( gl.TEXTURE_2D, 0, gl.RGBA, fb.width, fb.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null );
fb.renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer( gl.RENDERBUFFER, fb.renderbuffer );
gl.renderbufferStorage( gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, fb.width, fb.height );
gl.framebufferTexture2D( gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, fb.color0_texture, 0 );
gl.framebufferRenderbuffer( gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, fb.renderbuffer );
gl.bindTexture( gl.TEXTURE_2D, null );
gl.bindRenderbuffer( gl.RENDERBUFFER, null );
gl.bindFramebuffer( gl.FRAMEBUFFER, null );
blurFB.push( fb );
}
}
function Fract( val ) {
return val - Math.trunc( val );
}
function CalcAng( deltaTime, intervall ) {
return Fract( deltaTime / (1000*intervall) ) * 2.0 * Math.PI;
}
function CalcMove( deltaTime, intervall, range ) {
var pos = Fract( deltaTime / (1000*intervall) ) * 2.0;
pos = pos < 1.0 ? pos : (2.0-pos);
return range[0] + (range[1] - range[0]) * pos;
}
function EllipticalPosition( a, b, angRag ) {
var a_b = a * a - b * b
var ea = (a_b <= 0) ? 0 : Math.sqrt( a_b );
var eb = (a_b >= 0) ? 0 : Math.sqrt( -a_b );
return [ a * Math.sin( angRag ) - ea, b * Math.cos( angRag ) - eb, 0 ];
}
function IdentM44() {
return [ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1 ];
};
function RotateAxis(matA, angRad, axis) {
var aMap = [ [1, 2], [2, 0], [0, 1] ];
var a0 = aMap[axis][0], a1 = aMap[axis][1];
var sinAng = Math.sin(angRad), cosAng = Math.cos(angRad);
var matB = matA.slice(0);
for ( var i = 0; i < 3; ++ i ) {
matB[a0*4+i] = matA[a0*4+i] * cosAng + matA[a1*4+i] * sinAng;
matB[a1*4+i] = matA[a0*4+i] * -sinAng + matA[a1*4+i] * cosAng;
}
return matB;
}
function Rotate(matA, angRad, axis) {
var s = Math.sin(angRad), c = Math.cos(angRad);
var x = axis[0], y = axis[1], z = axis[2];
matB = [
x*x*(1-c)+c, x*y*(1-c)-z*s, x*z*(1-c)+y*s, 0,
y*x*(1-c)+z*s, y*y*(1-c)+c, y*z*(1-c)-x*s, 0,
z*x*(1-c)-y*s, z*y*(1-c)+x*s, z*z*(1-c)+c, 0,
0, 0, 0, 1 ];
return Multiply(matA, matB);
}
function Multiply(matA, matB) {
matC = IdentM44();
for (var i0=0; i0<4; ++i0 )
for (var i1=0; i1<4; ++i1 )
matC[i0*4+i1] = matB[i0*4+0] * matA[0*4+i1] + matB[i0*4+1] * matA[1*4+i1] + matB[i0*4+2] * matA[2*4+i1] + matB[i0*4+3] * matA[3*4+i1]
return matC;
}
function Cross( a, b ) { return [ a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0], 0.0 ]; }
function Dot( a, b ) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
function Normalize( v ) {
var len = Math.sqrt( v[0] * v[0] + v[1] * v[1] + v[2] * v[2] );
return [ v[0] / len, v[1] / len, v[2] / len ];
}
Camera = function( pos, target, up, fov_y, vp, near, far ) {
this.Time = function() { return Date.now(); }
this.pos = pos;
this.target = target;
this.up = up;
this.fov_y = fov_y;
this.vp = vp;
this.near = near;
this.far = far;
this.orbit_mat = this.current_orbit_mat = this.model_mat = this.current_model_mat = IdentM44();
this.mouse_drag = this.auto_spin = false;
this.auto_rotate = true;
this.mouse_start = [0, 0];
this.mouse_drag_axis = [0, 0, 0];
this.mouse_drag_angle = 0;
this.mouse_drag_time = 0;
this.drag_start_T = this.rotate_start_T = this.Time();
this.Ortho = function() {
var fn = this.far + this.near;
var f_n = this.far - this.near;
var w = this.vp[0];
var h = this.vp[1];
return [
2/w, 0, 0, 0,
0, 2/h, 0, 0,
0, 0, -2/f_n, 0,
0, 0, -fn/f_n, 1 ];
};
this.Perspective = function() {
var n = this.near;
var f = this.far;
var fn = f + n;
var f_n = f - n;
var r = this.vp[0] / this.vp[1];
var t = 1 / Math.tan( Math.PI * this.fov_y / 360 );
return [
t/r, 0, 0, 0,
0, t, 0, 0,
0, 0, -fn/f_n, -1,
0, 0, -2*f*n/f_n, 0 ];
};
this.LookAt = function() {
var mz = Normalize( [ this.pos[0]-this.target[0], this.pos[1]-this.target[1], this.pos[2]-this.target[2] ] );
var mx = Normalize( Cross( this.up, mz ) );
var my = Normalize( Cross( mz, mx ) );
var tx = Dot( mx, this.pos );
var ty = Dot( my, this.pos );
var tz = Dot( [-mz[0], -mz[1], -mz[2]], this.pos );
return [mx[0], my[0], mz[0], 0, mx[1], my[1], mz[1], 0, mx[2], my[2], mz[2], 0, tx, ty, tz, 1];
};
this.Orbit = function() {
return Multiply(this.LookAt(), this.OrbitMatrix());
};
this.OrbitMatrix = function() {
return (this.mouse_drag || (this.auto_rotate && this.auto_spin)) ? Multiply(this.current_orbit_mat, this.orbit_mat) : this.orbit_mat;
};
this.AutoModelMatrix = function() {
return this.auto_rotate ? Multiply(this.current_model_mat, this.model_mat) : this.model_mat;
};
this.Update = function(vp_size) {
if (vp_size)
this.vp = vp_size;
var current_T = this.Time();
this.current_model_mat = IdentM44()
if (this.mouse_drag) {
this.current_orbit_mat = Rotate(IdentM44(), this.mouse_drag_angle, this.mouse_drag_axis);
} else if (this.auto_rotate) {
if (this.auto_spin ) {
if (this.mouse_drag_time > 0 ) {
var angle = this.mouse_drag_angle * (current_T - this.rotate_start_T) / this.mouse_drag_time;
this.current_orbit_mat = Rotate(IdentM44(), angle, this.mouse_drag_axis);
}
} else {
var auto_angle_x = Fract( (current_T - this.rotate_start_T) / 13000.0 ) * 2.0 * Math.PI;
var auto_angle_y = Fract( (current_T - this.rotate_start_T) / 17000.0 ) * 2.0 * Math.PI;
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_x, 0 );
this.current_model_mat = RotateAxis( this.current_model_mat, auto_angle_y, 1 );
}
}
};
this.ChangeMotionMode = function(drag, spin, auto ) {
var new_drag = drag;
var new_auto = new_drag ? false : auto;
var new_spin = new_auto ? spin : false;
change = this.mouse_drag != new_drag || this.auto_rotate != new_auto || this.auto_spin != new_spin;
if (!change)
return;
if (new_drag && !this.mouse_drag) {
this.drag_start_T = this.Time();
this.mouse_drag_angle = 0.0;
this.mouse_drag_time = 0;
}
if (new_auto && !this.auto_rotate)
this.rotate_start_T = this.Time();
this.mouse_drag = new_drag;
this.auto_rotate = new_auto;
this.auto_spin = new_spin;
this.orbit_mat = Multiply(this.current_orbit_mat, this.orbit_mat);
this.current_orbit_mat = IdentM44();
this.model_mat = Multiply(this.current_model_mat, this.model_mat);
this.current_model_mat = IdentM44();
};
this.OnMouseDown = function( event ) {
var rect = gl.canvas.getBoundingClientRect();
if ( event.clientX < rect.left || event.clientX > rect.right ) return;
if ( event.clientY < rect.top || event.clientY > rect.bottom ) return;
if (event.button == 0) { // left button
this.mouse_start = [event.clientX, event.clientY];
this.ChangeMotionMode( true, false, false );
}
};
this.OnMouseUp = function( event ) {
if (event.button == 0) { // left button
this.ChangeMotionMode( false, true, true );
} else if (event.button == 1) {// middle button
this.ChangeMotionMode( false, false, !this.auto_rotate );
}
};
this.OnMouseMove = function( event ) {
var dx = (event.clientX-this.mouse_start[0]) / this.vp[0];
var dy = (event.clientY-this.mouse_start[1]) / this.vp[1];
var len = Math.sqrt(dx*dx + dy*dy);
if (this.mouse_drag && len > 0) {
this.mouse_drag_angle = Math.PI*len;
this.mouse_drag_axis = [dy/len, 0, -dx/len];
this.mouse_drag_time = this.Time() - this.drag_start_T;
}
};
this.domElement = document;
var cam = this;
//this.domElement.addEventListener( 'contextmenu', function(e) { event.preventDefault(); }, false );
this.domElement.addEventListener( 'mousedown', function(e) { cam.OnMouseDown(e) }, false );
this.domElement.addEventListener( 'mouseup', function(e) { cam.OnMouseUp(e) }, false );
this.domElement.addEventListener( 'mousemove', function(e) { cam.OnMouseMove(e) }, false );
//this.domElement.addEventListener( 'mousewheel', hid_events.onMouseWheel, false );
//this.domElement.addEventListener( 'DOMMouseScroll', hid_events.onMouseWheel, false ); // firefox
}
var ShProg = {};
ShProg.Create = function( shaderList ) {
var shaderObjs = [];
for ( var i_sh = 0; i_sh < shaderList.length; ++ i_sh ) {
var shderObj = this.Compile( shaderList[i_sh].source, shaderList[i_sh].stage );
if ( !shderObj )
return 0;
shaderObjs.push( shderObj );
}
var progObj = this.Link( shaderObjs )
if ( progObj ) {
progObj.attrInx = {};
var noOfAttributes = gl.getProgramParameter( progObj, gl.ACTIVE_ATTRIBUTES );
for ( var i_n = 0; i_n < noOfAttributes; ++ i_n ) {
var name = gl.getActiveAttrib( progObj, i_n ).name;
progObj.attrInx[name] = gl.getAttribLocation( progObj, name );
}
progObj.uniLoc = {};
var noOfUniforms = gl.getProgramParameter( progObj, gl.ACTIVE_UNIFORMS );
for ( var i_n = 0; i_n < noOfUniforms; ++ i_n ) {
var name = gl.getActiveUniform( progObj, i_n ).name;
progObj.uniLoc[name] = gl.getUniformLocation( progObj, name );
}
}
return progObj;
}
ShProg.AttrI = function( progObj, name ) { return progObj.attrInx[name]; }
ShProg.UniformL = function( progObj, name ) { return progObj.uniLoc[name]; }
ShProg.Use = function( progObj ) { gl.useProgram( progObj ); }
ShProg.SetI1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1i( progObj.uniLoc[name], val ); }
ShProg.SetF1 = function( progObj, name, val ) { if(progObj.uniLoc[name]) gl.uniform1f( progObj.uniLoc[name], val ); }
ShProg.SetF2 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform2fv( progObj.uniLoc[name], arr ); }
ShProg.SetF3 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform3fv( progObj.uniLoc[name], arr ); }
ShProg.SetF4 = function( progObj, name, arr ) { if(progObj.uniLoc[name]) gl.uniform4fv( progObj.uniLoc[name], arr ); }
ShProg.SetM33 = function( progObj, name, mat ) { if(progObj.uniLoc[name]) gl.uniformMatrix3fv( progObj.uniLoc[name], false, mat ); }
ShProg.SetM44 = function( progObj, name, mat ) { if(progObj.uniLoc[name]) gl.uniformMatrix4fv( progObj.uniLoc[name], false, mat ); }
ShProg.Compile = function( source, shaderStage ) {
var shaderScript = document.getElementById(source);
if (shaderScript)
source = shaderScript.text;
var shaderObj = gl.createShader( shaderStage );
gl.shaderSource( shaderObj, source );
gl.compileShader( shaderObj );
var status = gl.getShaderParameter( shaderObj, gl.COMPILE_STATUS );
if ( !status ) alert(gl.getShaderInfoLog(shaderObj));
return status ? shaderObj : null;
}
ShProg.Link = function( shaderObjs ) {
var prog = gl.createProgram();
for ( var i_sh = 0; i_sh < shaderObjs.length; ++ i_sh )
gl.attachShader( prog, shaderObjs[i_sh] );
gl.linkProgram( prog );
status = gl.getProgramParameter( prog, gl.LINK_STATUS );
if ( !status ) alert("Could not initialise shaders");
gl.useProgram( null );
return status ? prog : null;
}
var VertexBuffer = {
Create: function(attribs, indices) {
var buffer = { buf: [], attr: [], inx: gl.createBuffer(), inxLen: indices.length };
for (var i=0; i<attribs.length; ++i) {
buffer.buf.push(gl.createBuffer());
buffer.attr.push({ size : attribs[i].attrSize, loc : attribs[i].attrLoc });
gl.bindBuffer(gl.ARRAY_BUFFER, buffer.buf[i]);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( attribs[i].data ), gl.STATIC_DRAW);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffer.inx);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array( indices ), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return buffer;
},
Draw: function(bufObj) {
for (var i=0; i<bufObj.buf.length; ++i) {
gl.bindBuffer(gl.ARRAY_BUFFER, bufObj.buf[i]);
gl.vertexAttribPointer(bufObj.attr[i].loc, bufObj.attr[i].size, gl.FLOAT, false, 0, 0);
gl.enableVertexAttribArray( bufObj.attr[i].loc);
}
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, bufObj.inx);
gl.drawElements(bufObj.primitve_type ? bufObj.primitve_type : gl.TRIANGLES, bufObj.inxLen, gl.UNSIGNED_SHORT, 0);
for (var i=0; i<bufObj.buf.length; ++i)
gl.disableVertexAttribArray(bufObj.attr[i].loc);
gl.bindBuffer( gl.ARRAY_BUFFER, null );
gl.bindBuffer( gl.ELEMENT_ARRAY_BUFFER, null );
} };
initScene();
})();
html,body { margin: 0; overflow: hidden; }
#gui { position : absolute; top : 0; left : 0; }
<script id="draw-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec3 inPos;
attribute vec3 inNV;
attribute vec3 inCol;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform mat4 u_projectionMat44;
uniform mat4 u_modelViewMat44;
void main()
{
vertNV = mat3( u_modelViewMat44 ) * normalize( inNV );
vertCol = inCol;
vec4 pos = u_modelViewMat44 * vec4( inPos, 1.0 );
vertPos = pos.xyz / pos.w;
gl_Position = u_projectionMat44 * pos;
}
</script>
<script id="draw-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec3 vertPos;
varying vec3 vertNV;
varying vec3 vertCol;
uniform float u_shininess;
void main()
{
vec3 color = vertCol;
vec3 normalV = normalize( vertNV );
vec3 eyeV = normalize( -vertPos );
vec3 halfV = normalize( eyeV + normalV );
float NdotH = max( 0.0, dot( normalV, halfV ) );
float shineFac = ( u_shininess + 2.0 ) * pow( NdotH, u_shininess ) / ( 2.0 * 3.14159265 );
gl_FragColor = vec4( color.rgb * (0.2 + NdotH), 1.0 );
}
</script>
<script id="post-shader-vs" type="x-shader/x-vertex">
precision mediump float;
attribute vec2 inPos;
varying vec2 pos;
void main()
{
pos = inPos;
gl_Position = vec4( inPos, 0.0, 1.0 );
}
</script>
<script id="blur-shader-fs" type="x-shader/x-fragment">
precision mediump float;
varying vec2 pos;
uniform sampler2D u_texture;
uniform vec2 u_textureSize;
uniform float u_sigma;
uniform float u_radius;
uniform vec2 u_dir;
float CalcGauss( float x, float sigma )
{
if ( sigma <= 0.0 )
return 0.0;
return exp( -(x*x) / (2.0 * sigma) ) / (2.0 * 3.14159 * sigma);
}
void main()
{
vec2 texC = pos.st * 0.5 + 0.5;
vec4 texCol = texture2D( u_texture, texC );
vec4 gaussCol = vec4( texCol.rgb, 1.0 );
vec2 step = u_dir / u_textureSize;
for ( int i = 1; i <= 32; ++ i )
{
float weight = CalcGauss( float(i) / 32.0, u_sigma * 0.5 );
if ( weight < 1.0/255.0 )
break;
texCol = texture2D( u_texture, texC + u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
texCol = texture2D( u_texture, texC - u_radius * step * float(i) );
gaussCol += vec4( texCol.rgb * weight, weight );
}
gaussCol.rgb = clamp( gaussCol.rgb / gaussCol.w, 0.0, 1.0 );
gl_FragColor = vec4( gaussCol.rgb, 1.0 );
}
</script>
<div>
<form id="gui" name="inputs">
<table>
<tr> <td> <font color= #CCF>radius</font> </td>
<td> <input type="range" id="radius" min="1" max="1000" value="350" onchange="changeEventHandler(event);"/></td> </tr>
<tr> <td> <font color= #CCF>blur</font> </td>
<td> <input type="range" id="sigma" min="1" max="100" value="5" onchange="changeEventHandler(event);"/></td> </tr>
</table>
</form>
</div>
<canvas id="canvas" style="border: none;"></canvas>