What I want to achieve is to render many small quads with the OpenGL function glDrawArraysInstanced, with equal spacing between them. For example, please refer to the following image:
The code is as follows:
void OpenGLShowVideo::displayBySmallMatrix()
{
    // Now use QOpenGLExtraFunctions instead of QOpenGLFunctions, as we want to
    // do more than what GL(ES) 2.0 offers.
    QOpenGLExtraFunctions *f = QOpenGLContext::currentContext()->extraFunctions();
    f->glClearColor(9.f/255.0f, 14.f/255.0f, 15.f/255.0f, 1);
    f->glClear(GL_COLOR_BUFFER_BIT);
    f->glViewport(0, 0, this->width(), this->height());

    m_displayByMatrixProgram->bind();
    f->glActiveTexture(GL_TEXTURE0 + m_acRenderToScreenTexUnit);
    f->glBindTexture(GL_TEXTURE_2D, m_renderWithMaskFbo->texture());

    if (m_uniformsDirty) {
        m_uniformsDirty = false;
        m_displayByMatrixProgram->setUniformValue(m_samplerLoc, m_acRenderToScreenTexUnit);

        m_proj.setToIdentity();
        m_proj.perspective(INIT_VERTICAL_ANGLE, float(this->width()) / float(this->height()), m_fNearPlane, m_fFarPlane);
        m_displayByMatrixProgram->setUniformValue(m_projMatrixLoc, m_proj);

        QMatrix4x4 camera;
        camera.lookAt(m_eye, m_eye + m_target, QVector3D(0, 1, 0));
        m_displayByMatrixProgram->setUniformValue(m_camMatrixLoc, camera);

        m_world.setToIdentity();
        float fOffsetZ = m_fVerticalAngle / INIT_VERTICAL_ANGLE;
        m_world.translate(m_fMatrixOffsetX, m_fMatrixOffsetY, fOffsetZ);
        m_proj.scale(MATRIX_INIT_SCALE_X, MATRIX_INIT_SCALE_Y, 1.0f);
        m_world.rotate(180, 1, 0, 0);

        QMatrix4x4 wm = m_world;
        m_displayByMatrixProgram->setUniformValue(m_worldMatrixLoc, wm);

        QMatrix4x4 mm;
        mm.setToIdentity();
        m_displayByMatrixProgram->setUniformValue(m_myMatrixLoc, mm);

        m_displayByMatrixProgram->setUniformValue(m_lightPosLoc, QVector3D(0, 0, 70));

        QSize tmpSize = QSize(m_viewPortWidth, m_viewPortHeight);
        m_displayByMatrixProgram->setUniformValue(m_resolutionLoc, tmpSize);

        int whRatioVal = m_viewPortWidth / m_viewPortHeight;
        m_displayByMatrixProgram->setUniformValue(m_whRatioLoc, whRatioVal);
    }

    m_geometries->bindBufferForArraysInstancedDraw();
    f->glDrawArraysInstanced(GL_TRIANGLE_STRIP, 0, 4, m_viewPortWidth * m_viewPortHeight);
}
And the vertex shader code is as follows:
#version 330
layout(location = 0) in vec4 vertex;
out vec3 color;

uniform mat4 mvp_matrix;
uniform mat4 projMatrix;
uniform mat4 camMatrix;
uniform mat4 worldMatrix;
uniform mat4 myMatrix;
uniform vec2 viewResolution;
uniform int whRatio;
uniform sampler2D sampler;

void main() {
    int posX = gl_InstanceID % int(viewResolution.x);
    int posY = gl_InstanceID / int(viewResolution.y);
    if (posY % whRatio < whRatio) {
        posY = gl_InstanceID / int(viewResolution.x);
    }
    ivec2 pos = ivec2(posX, posY);
    vec2 t = vec2(pos.x * 3.0, pos.y * 3.0);
    mat4 wm = mat4(1, 0, 0, 0,  0, 1, 0, 0,  0, 0, 1, 0,  t.x, t.y, 1, 1) * worldMatrix;
    color = texelFetch(sampler, pos, 0).rgb;
    gl_Position = projMatrix * camMatrix * wm * vertex;
}
And the fragment shader is as follows:
#version 330 core
in vec3 color;
out vec4 fragColor;

void main() {
    fragColor = vec4(color, 1.0);
}
However, when I move the camera away from the screen (by changing the m_eye value passed to camera.lookAt(m_eye, m_eye + m_target, QVector3D(0, 1, 0))), I get something like this:
The spacing between the quads becomes uneven, and the size of the quads also varies. But when I move the camera closer to the screen, it looks much better.
I think what you're seeing there is the result of rounding the coordinates to the nearest integer pixel coordinate.
To get something that looks more even, you want to use some form of anti-aliasing. The options that spring to mind are:
- Enable some sort of full-screen anti-aliasing like MSAA. This is simple to enable, but can have a significant performance cost.
- Put your pattern in a texture, and tile that texture over a single quad. Texture filtering and mipmaps should take care of the anti-aliasing for you, and it will probably be faster to render that way as well, because you only need a single quad. (Sketches of both options follow below.)
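A minimal sketch of the first option in Qt (standard QSurfaceFormat usage; the sample count of 4 is just an example):

    // Request a 4x multisampled default framebuffer before the first
    // OpenGL widget/window is created.
    QSurfaceFormat fmt = QSurfaceFormat::defaultFormat();
    fmt.setSamples(4);                      // 4x MSAA
    QSurfaceFormat::setDefaultFormat(fmt);  // or setFormat(fmt) on the widget

For the second option, the essential part is uploading the pattern once and letting mipmapped filtering smooth it out when it is minified:

    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    f->glGenerateMipmap(GL_TEXTURE_2D);     // after uploading the base level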
Related
I am new to shaders, and I want to animate an object with the vertex shader.
Right now I just want to move it with a constant offset. For some reason, instead of moving in the x-direction of the world, it moves in the x-direction of the camera. (So whenever I turn the camera, the object moves with me.)
The project is in Processing, but I don't think that affects the shader.
THE PROCESSING CODE:
PShader sdr;

void setup() {
    size(1000, 1000, P3D);
    noStroke();
    sdr = loadShader("shdFrag.glsl", "shdVert.glsl");
}

void draw() {
    background(200);

    // Set camera
    camera(0, -300, 700, mouseX-500, 0, 200, 0, 1, 0);

    // Ground
    resetShader();
    beginShape();
    fill(100);
    vertex(-500, 0, 500);
    vertex( 500, 0, 500);
    vertex( 500, 0, -500);
    vertex(-500, 0, -500);
    endShape();

    // Red Sphere
    shader(sdr);
    fill(255, 0, 0);
    sphere(100);
}
VERTEX SHADER:
uniform mat4 transform;

attribute vec4 position;
attribute vec4 color;

out vec4 vertColor;

void main() {
    vec4 pos = position;
    pos.x += 300;

    vertColor = color;
    gl_Position = transform * pos;
}
FRAGMENT SHADER:
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif

in vec4 vertColor;

void main() {
    vec4 color = vertColor;
    gl_FragColor = vec4(color.xyz, 1);
}
A GIF of what is happening: [GIF: the scene with a sphere]
I'm doing a simple image processing app using OpenGL and C++.
However, there is one particular thing that I don't know how to do, which is:
I need to let my user draw a histogram graph.
The way I thought to do this is by creating a grid of quads, one quad for each pixel intensity of my image. For example, if the image is 8-bit, I would need a 256x256 grid of quads. After drawing the grid, I want my user to manually paint the quads in a quantized way (quad by quad), so that he could "draw" the histogram. The problem is that I don't know how to do either of these things...
Would anyone give me direction on how to draw the grid, and how to implement the painting?
If you're confused about "drawing a histogram", just consider it a regular graph.
You don't have to draw a grid of quads. Just one quad is enough; then use a shader to sample from the histogram stored in a 1D texture. Here is what I get:
Vertex shader:
#version 450 core

layout(std140, binding = 0) uniform view_block {
    vec2 scale, offset;
} VIEW;

layout(std140, binding = 1) uniform draw_block {
    vec4 position;
    float max_value;
} DRAW;

out gl_PerVertex {
    vec4 gl_Position;
};

void main()
{
    ivec2 id = ivec2(gl_VertexID & 1, gl_VertexID >> 1);
    vec2 position = vec2(DRAW.position[id.x << 1], DRAW.position[(id.y << 1) + 1]);
    gl_Position = vec4(fma(position, VIEW.scale, VIEW.offset), 0, 1);
}
Fragment shader:
#version 450 core

layout(std140, binding = 1) uniform draw_block {
    vec4 position;
    float max_value;
} DRAW;

layout(binding = 0) uniform sampler1D hist;
layout(location = 0) out vec4 OUT;

void main()
{
    const vec2 extent = DRAW.position.zw - DRAW.position.xy;
    vec2 texcoord = (gl_FragCoord.xy - DRAW.position.xy) / extent;
    OUT.rgb = vec3(lessThan(texcoord.yyy * DRAW.max_value, texture(hist, texcoord.x).rgb));
    OUT.a = 1;
}
Histogram texture creation:
image hist(256, 1, 3, type_float);
// ... calculate the histogram ...
tex.reset(glCreateTextureSN(GL_TEXTURE_1D));
glTextureStorage1D(tex.get(), 1, GL_RGB32F, hist.w);
glTextureSubImage1D(tex.get(), 0, 0, hist.w, GL_RGB, GL_FLOAT, hist.c[0]);
glTextureParameteri(tex.get(), GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
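The elided histogram computation could look something like the following sketch (my own illustration, not the original code; it assumes 8-bit interleaved RGB input in src, w*h pixels, and the same interleaved 256x3 float layout that the GL_RGB upload above expects):

    // Count pixel intensities per channel into 256 interleaved RGB bins.
    float *bins = hist.c[0];
    std::fill(bins, bins + 256 * 3, 0.0f);
    for (size_t i = 0; i < size_t(w) * h; ++i) {
        bins[src[3*i + 0] * 3 + 0] += 1.0f;  // red bin
        bins[src[3*i + 1] * 3 + 1] += 1.0f;  // green bin
        bins[src[3*i + 2] * 3 + 2] += 1.0f;  // blue bin
    }
    // max_value (used by draw_block in the rendering routine below)
    // is the tallest bin across all channels.
    float max_value = *std::max_element(bins, bins + 256 * 3);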
Rendering routine:
const vec2i vs = { glutGet(GLUT_WINDOW_WIDTH), glutGet(GLUT_WINDOW_HEIGHT) };
glViewport(0, 0, vs[0], vs[1]);
glClear(GL_COLOR_BUFFER_BIT);

struct view_block {
    vec2f scale, offset;
} VIEW = {
    vec2f(2)/vec2f(vs), -vec2f(1)
};
GLbuffer view_buf(glCreateBufferStorageSN(sizeof(VIEW), &VIEW, 0));
glBindBufferBase(GL_UNIFORM_BUFFER, 0, view_buf.get());

struct draw_block {
    box2f position;
    float max_value;
} DRAW = {
    box2f(0, 0, vs[0], vs[1]),
    max_value
};
GLbuffer draw_buf(glCreateBufferStorageSN(sizeof(DRAW), &DRAW, 0));
glBindBufferBase(GL_UNIFORM_BUFFER, 1, draw_buf.get());

bind_textures(tex.get());
glBindProgramPipeline(pp.get());
glBindVertexArray(0);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

glutSwapBuffers();
I am a beginner with the Processing language and GLSL shaders. I am trying to port a Fresnel + cubemap shader for a glass material, but as a result my shape always disappears instead... :-(
My vertex shader is:
const float Air = 1.0;
const float Glass = 1.51714;
const float Eta = Air / Glass;
const float R0 = ((Air - Glass) * (Air - Glass)) / ((Air + Glass) * (Air + Glass));

uniform mat4 transform;
uniform mat4 modelview;
uniform mat3 normalMatrix;

attribute vec4 vertex;
attribute vec3 normal;

varying vec3 v_reflection;
varying vec3 v_refraction;
varying float v_fresnel;

void main(void) {
    vec4 t_vertex = modelview * vertex;
    vec3 incident = normalize(vec3(t_vertex));
    vec3 t_normal = normalMatrix * normal;

    v_refraction = refract(incident, t_normal, Eta);
    v_reflection = reflect(incident, t_normal);
    v_fresnel = R0 + (1.0 - R0) * pow((1.0 - dot(-incident, t_normal)), 5.0);

    gl_Position = transform * t_vertex;
}
And the fragment shader:
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif

uniform samplerCube cubemap;

varying vec3 v_refraction;
varying vec3 v_reflection;
varying float v_fresnel;

void main(void) {
    vec4 refractionColor = textureCube(cubemap, normalize(v_refraction));
    vec4 reflectionColor = textureCube(cubemap, normalize(v_reflection));
    gl_FragColor = mix(refractionColor, reflectionColor, v_fresnel);
}
I am testing this shader with the Processing 3.0 sketch below (edited), in Android mode:
PShader shader;
PShape sphere;

void setup() {
    fullScreen(P3D);
    noStroke();

    shader = loadShader("glass.frag.glsl", "glass.vert.glsl");
    openCubeMap("posx.png", "negx.png", "posy.png", "negy.png", "posz.png", "negz.png");
    shader.set("cubemap", 1);

    sphere = createShape(SPHERE, 120);
    sphere.setFill(color(-1, 50));
}

void draw() {
    background(0);

    directionalLight(102, 102, 102, 0, 0, -1);
    lightSpecular(204, 204, 204);
    directionalLight(102, 102, 102, 0, 1, -1);
    specular(100, 150, 150);

    translate(width / 2, height / 2);
    shader(shader);
    shape(sphere);
}
void openCubeMap(String posX, String negX, String posY, String negY, String posZ, String negZ) {
    PGL pgl = beginPGL();

    // Create the OpenGL-based cubemap
    IntBuffer envMapTextureID = IntBuffer.allocate(1);
    pgl.genTextures(1, envMapTextureID);
    pgl.activeTexture(PGL.TEXTURE1);
    pgl.enable(PGL.TEXTURE_CUBE_MAP);
    pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0));
    pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_S, PGL.CLAMP_TO_EDGE);
    pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_T, PGL.CLAMP_TO_EDGE);
    pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_R, PGL.CLAMP_TO_EDGE);
    pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MIN_FILTER, PGL.LINEAR);
    pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MAG_FILTER, PGL.LINEAR);

    // Load in the textures
    String[] textureNames = { posX, negX, posY, negY, posZ, negZ };
    for (int i = 0; i < textureNames.length; i++) {
        PImage texture = loadImage(textureNames[i]);
        int w = texture.width;
        int h = texture.height;
        texture.loadPixels();
        pgl.texImage2D(PGL.TEXTURE_CUBE_MAP_POSITIVE_X + i, 0, PGL.RGBA, w, h, 0, PGL.RGBA, PGL.UNSIGNED_BYTE, IntBuffer.wrap(texture.pixels));
    }

    endPGL();
}
And I am using these images to build the cubemap.
Does anyone know how I can make this work?
The problem is not in your code but in your data.
OpenGL requires that all textures used by a cubemap have the same dimensions, and that the textures be square; otherwise it will refuse to load them.
I checked your PNGs, and this is not the case: they all have the same dimensions, but they are not square (255x230).
Also, on Android it may be required that the texture dimensions be a power of 2 (128, 256, 512, ...).
So I resized all the textures to 256x256 pixels, and now your sample works:
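If you want to catch this at load time rather than debugging it later, a small check along these lines (my own sketch in plain C++, not part of Processing or OpenGL) would reject bad faces before upload:

    // Returns true if a cubemap face of size w x h is square with
    // power-of-two dimensions (the safe subset for GLES/Android).
    bool isSafeCubemapFace(int w, int h) {
        bool square = (w == h);
        bool powerOfTwo = w > 0 && (w & (w - 1)) == 0;
        return square && powerOfTwo;
    }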
I'm trying to move a triangle based on time using a matrix. But it does some weird stuff:
What it should do:
move on the x-axis
What it does:
The top point of the triangle stays fixed, and the other points seem to move around it in a circular motion while scaling on the x and z axes (I'm still in 2D, so I don't have depth).
My C++ Code:
...
GLfloat timeValue = glfwGetTime();
GLfloat offset = (sin(timeValue * 4) / 2);

GLfloat matrix[16] = {
    1, 0, 0, offset,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1
};

GLuint uniform_m_transform = glGetUniformLocation(shader_program, "m_transform");
glUniformMatrix4fv(uniform_m_transform, 1, GL_FALSE, matrix);
...
My vertex shader:
#version 330 core

layout (location = 0) in vec3 position;
layout (location = 1) in vec3 color;

out vec3 ourColor;

uniform mat4 m_transform;

void main()
{
    ourColor = color;
    gl_Position = m_transform * vec4(position, 1.0);
}
I don't know what I did wrong; according to the tutorial, the matrix element I set to offset should change the x-translation.
Do you know what my mistake is?
You are providing a row-major matrix, so you need to tell OpenGL to transpose it:
glUniformMatrix4fv(uniform_m_transform, 1, GL_TRUE, matrix);
Reference: glUniform, check the transpose parameter.
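Alternatively, keep GL_FALSE and write the matrix in the column-major order OpenGL expects, so the translation goes in the last four elements of the array; a sketch of the equivalent:

    GLfloat matrix[16] = {
        1, 0, 0, 0,
        0, 1, 0, 0,
        0, 0, 1, 0,
        offset, 0, 0, 1   // translation occupies the last column in column-major order
    };
    glUniformMatrix4fv(uniform_m_transform, 1, GL_FALSE, matrix);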
As a test, I created a simple quad. Here are its attributes:
Vertex vertices[] =
{
    //   Positions        Normals
    { vec3(-1,-1, 0), vec3(-1,-1, 1) }, // v0
    { vec3( 1,-1, 0), vec3( 1,-1, 1) }, // v1
    { vec3(-1, 1, 0), vec3(-1, 1, 1) }, // v2
    { vec3( 1, 1, 0), vec3( 1, 1, 1) }, // v3
};
And I put it in my world space at (0.0, 0.0, -9.5). Then I put my point light position at (0.0, 0.0, -8.0). My camera is at the origin (0.0, 0.0, 0.0). When I run my program, this works as expected:
But then, when I replace this quad with 9 scaled-down quads, all placed at -9.5 on Z (in other words, all parallel to each other), my diffuse lighting gets a little weird.
It looks like the corners are receiving too much light, breaking the nice diffuse circle that we see on a regular quad.
Here is my shader program:
precision mediump int;
precision mediump float;

varying vec3 v_position;
varying vec3 v_normal;

#if defined(VERTEX)

uniform mat4 u_mvpMatrix;
uniform mat4 u_mvMatrix;
uniform mat3 u_normalMatrix;

attribute vec4 a_position;
attribute vec3 a_normal;

void main()
{
    vec4 position = u_mvMatrix * a_position;
    v_position = position.xyz / position.w;
    v_normal = normalize(u_normalMatrix * a_normal);
    gl_Position = u_mvpMatrix * a_position;
}

#endif // VERTEX

#if defined(FRAGMENT)

uniform vec3 u_pointLightPosition;

void main()
{
    vec3 viewDir = normalize(-v_position);
    vec3 normal = normalize(v_normal);

    vec3 lightPosition = u_pointLightPosition - v_position;
    vec3 pointLightDir = normalize(lightPosition);
    float distance = length(lightPosition);
    float pointLightAttenuation = 1.0 / (1.0 + (0.25 * distance * distance));

    float diffuseTerm = max(dot(pointLightDir, normal), 0.15);
    gl_FragColor = vec4(diffuseTerm * pointLightAttenuation);
}

#endif // FRAGMENT
My uniforms are uploaded as follows (I'm using GLM):
const mat4 &view_matrix = getViewMatrix();
mat4 mv_matrix = view_matrix * getModelMatrix();
mat4 mvp_matrix = getProjectionMatrix() * mv_matrix;
mat3 normal_matrix = inverseTranspose(mat3(mv_matrix));
vec3 pointLightPos = vec3(view_matrix * vec4(getPointLightPos(), 1.0f));

glUniformMatrix4fv(   mvpMatrixUniformID, 1, GL_FALSE, (GLfloat*)&mvp_matrix);
glUniformMatrix4fv(    vpMatrixUniformID, 1, GL_FALSE, (GLfloat*)&mv_matrix);
glUniformMatrix3fv(normalMatrixUniformID, 1, GL_FALSE, (GLfloat*)&normal_matrix);
glUniform3f(pointLightPosUniformID, pointLightPos.x, pointLightPos.y, pointLightPos.z);
Am I doing anything wrong?
Thanks!
Without going too much into your code, I think everything is working just fine. I see a very similar result with a quick Blender setup:
The issue is that the interpolation of the normals doesn't produce a spherical bump.
It ends up being a patch like this (I simply subdivided a smooth-shaded cube)...
If you want a more spherical bump, you could generate the normals implicitly in the fragment shader (for example, as is done here (bottom image)), use a normal map, or use more tessellated geometry such as an actual sphere.
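For the first suggestion, here is a minimal fragment-shader sketch of deriving a sphere-like normal. It assumes you pass each quad's own corner coordinates in [-1, 1] down as a varying (v_quadPos is hypothetical; it is not in your current code):

    varying vec2 v_quadPos;  // assumed: quad-local coords in [-1, 1]

    vec3 sphericalNormal() {
        vec2 p = v_quadPos;
        float r2 = dot(p, p);
        // Treat the quad as the front face of a unit sphere; the max()
        // keeps the corners (where r2 > 1) from producing NaNs.
        return normalize(vec3(p, sqrt(max(0.0, 1.0 - r2))));
    }

You would then use this result in place of normalize(v_normal) in the diffuse term.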