Issues with shaders in Qt/OpenGL - c++

How can I use different color outputs within a fragment shader?
Say, my vshader looks like this:
#version 330
// Combined model-view-projection matrix, uploaded by the application.
uniform mat4 mvpmatrix;
// Attribute locations match the C++ side:
//   0 = position (setAttributeBuffer(0, ...)), 1 = texcoord.
layout(location=0) in vec4 position;
layout(location=1) in vec2 texcoord;
// Handed to the fragment shader; interpolated per fragment.
out vec2 out_texcoord;
void main()
{
gl_Position = mvpmatrix * position;
out_texcoord = texcoord;
}
// fshader
#version 330
uniform sampler2D texture;
in vec2 out_texcoord;
// Multiple fragment-shader outputs each need their own location,
// given either here with layout(location=N) or from the application
// via glBindFragDataLocation() before linking. Location N writes to
// the N-th attached draw buffer (see glDrawBuffers).
layout(location = 0) out vec4 out_color;
layout(location = 1) out vec4 out_color2;
void main()
{
out_color = texture2D(texture, out_texcoord);
// out_color2 = vec4(1.0, 1.0, 1.0, 1.0); // vec4, not vec3: four components
}
Accessing them like so:
m_program->enableAttributeArray(0); // position
m_program->setAttributeBuffer(0, GL_FLOAT, 0, 3, sizeof(Data));
m_program->enableAttributeArray(1); // texture
m_program->setAttributeBuffer(1, GL_FLOAT, sizeof(QVector3D), 2, sizeof(Data));
So far, everything uses the default output of the fragment shader, which is a texture. But how can I access different fragment outputs? Do I have to use layouts there as well? And, it's probably a dumb question... but are layout locations of the vshader/fshader bound to each other? So, if I'm enabling my buffer on AttributeArray(1), am I forced to use layout location 1 of BOTH shaders?

You can bind another attribute location for sending color information to your fragment shader any time but let me show you another trick :)
I use 2 attribute location, one to represent the location of the vertex and the other one to represent the color of the vertex.
glBindAttribLocation(program_, 0, "vs_in_pos");
glBindAttribLocation(program_, 1, "vs_in_col");
This is my mesh definition, where Vertex contain two 3D vector:
// Cube corners: each Vertex is {position, color}.
// Front half (z = +1) is red, back half (z = -1) is green.
Vertex vertices[] = {
{glm::vec3(-1, -1, 1), glm::vec3(1, 0, 0)},
{glm::vec3(1, -1, 1), glm::vec3(1, 0, 0)},
{glm::vec3(-1, 1, 1), glm::vec3(1, 0, 0)},
{glm::vec3(1, 1, 1), glm::vec3(1, 0, 0)},
{glm::vec3(-1, -1, -1), glm::vec3(0, 1, 0)},
{glm::vec3(1, -1, -1), glm::vec3(0, 1, 0)},
{glm::vec3(-1, 1, -1), glm::vec3(0, 1, 0)},
{glm::vec3(1, 1, -1), glm::vec3(0, 1, 0)},
};
// Index list for the cube above: 6 faces x 2 triangles x 3 indices = 36,
// drawn with glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_SHORT, 0).
GLushort indices[] = {
// Front
0, 1, 2, 2, 1, 3,
// Back
4, 6, 5, 6, 7, 5,
// Top
2, 3, 7, 2, 7, 6,
// Bottom
0, 5, 1, 0, 4, 5,
// Left
0, 2, 4, 4, 2, 6,
// Right
1, 5, 3, 5, 7, 3
};
This will represent a cube. I will mix this pre-defined color with a calculated value. This means the color of the cube will be changed due to its position. Set up a 3D vector for RGB values and set up to use it in the fragment shader:
loc_col_ = glGetUniformLocation(program_, "color");
Now in my render function I place the cubes in a 2D circle, moving them, rotating them:
for (int i = 0; i < num_of_cubes_; ++i) {
double fi = 2 * PI * (i / (double) num_of_cubes_);
glm::mat4 position = glm::translate<float>(cubes_radius_ * cos(fi), cubes_radius_ * sin(fi), 0);
glm::mat4 crackle = glm::translate<float>(0, 0.1 * (sin(2 * PI * (SDL_GetTicks() / 500.0) + i)), 0);
glm::mat4 rotate = glm::rotate<float>(360 * (SDL_GetTicks() / 16000.0), 0, 0, 1);
world_ = position * crackle * rotate;
glm::vec3 color = glm::vec3((1 + cos(fi)) * 0.5, (1 + sin(fi)) * 0.5, 1 - ((1 + cos(fi)) * 0.5));
glUniformMatrix4fv(loc_world_, 1, GL_FALSE, &(world_[0][0]));
glUniform3fv(loc_col_, 1, &(color[0]));
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_SHORT, 0);
}
You can see here I send not only the world matrix, but the color vector as well.
Linear interpolation in the fragment shader is achieved by the mix() function:
#version 130
// Per-vertex color interpolated from the vertex shader (attribute 1).
in vec3 vs_out_col;
in vec3 vs_out_pos;
out vec4 fs_out_col;
// Per-cube color computed on the CPU and uploaded each draw call.
uniform vec3 color;
void main() {
// Blend the uniform color with the interpolated vertex color 50/50.
fs_out_col = vec4(mix(color, vs_out_col, 0.5), 1);
}
Color is a value passed in during rendering, while vs_out_col comes from the vertex shader, where it arrived through attribute "channel" 1.
I hope you can understand me.

Layout locations on vertex and fragment shaders are independent. QT may be misleading with enableAttributeArray because in OpenGL this function is called glEnableVertexAttribArray - vertex is the keyword here. So you can pass per vertex data only into vertex shader, and then pass it into fragment shader using in/out (interpolation).
If you want to use multiple outputs from fragment shader you have to use locations and Output buffers.
This link should also be helpful, I'll summarize it later.

Related

Issue with passing integer vertex attributes with "in" keyword

I'm working on bone animation. I have a vertex struct that basically looks like
// One interleaved vertex. Attributes 0-6 set up in Mesh::genFormat()
// map onto these fields via offsetof. ids/weights implement 4-bone
// skinning: up to 4 bone influences per vertex, blended by weight.
struct MeshVertex
{
glm::vec3 pos;
glm::vec3 normal;
glm::vec2 tex;
glm::vec3 tangent;
glm::vec3 bitangent;
uint32_t ids[4] = {};    // bone indices (attribute 5, integer format)
float weights[4] = {};   // bone weights (attribute 6)
void print() const;      // debug dump, defined elsewhere
};
The mesh is a basic cube with one bone. Therefore ids = {0,0,0,0} and weights = {1.0f,0.0f,0.0f,0.0f} for every single vertex. In my mesh class I have a static function Mesh::genFormat() that handles attributes. vao is a static int in the mesh class and for_i is just a convenient macro I use to do for loops. Note that I correctly use glVertexArrayAttribIFormat.
/// Uploads the mesh's vertex and index data into fresh GPU buffers
/// using DSA (no bind needed) and keeps the textures for drawing.
/// @param vertices interleaved vertex data (see MeshVertex)
/// @param indices  triangle indices into `vertices`
/// @param textures textures this mesh samples when rendered
Mesh::Mesh(const std::vector<MeshVertex>& vertices, const std::vector<uint>& indices, const std::vector<Texture>& textures)
{
m_textures = textures;
m_num_indices = indices.size();
// create vertex and index buffers
glCreateBuffers(1, &m_vbo);
glCreateBuffers(1, &m_ibo);
// .data() instead of &v[0]: &v[0] is undefined behaviour on an empty
// vector, while .data() is well-defined (may be nullptr, size 0).
glNamedBufferData(m_vbo, sizeof(MeshVertex) * vertices.size(), vertices.data(), GL_STATIC_DRAW);
glNamedBufferData(m_ibo, sizeof(uint) * indices.size(), indices.data(), GL_STATIC_DRAW);
}
/// Builds the class-wide VAO: one attribute per MeshVertex field, all
/// sourced from binding point 0. The vertex buffer itself must be
/// attached to binding 0 elsewhere via glVertexArrayVertexBuffer.
/// Fix: the original had unbalanced ')' on the pos, ids and weights
/// lines, which does not compile.
void Mesh::genFormat()
{
glCreateVertexArrays(1, &vao);
for_i(7) { glEnableVertexArrayAttrib(vao, i); }
glVertexArrayAttribFormat(vao, 0, 3, GL_FLOAT, false, offsetof(MeshVertex, pos));
glVertexArrayAttribFormat(vao, 1, 3, GL_FLOAT, false, offsetof(MeshVertex, normal));
glVertexArrayAttribFormat(vao, 2, 2, GL_FLOAT, false, offsetof(MeshVertex, tex));
glVertexArrayAttribFormat(vao, 3, 3, GL_FLOAT, false, offsetof(MeshVertex, tangent));
glVertexArrayAttribFormat(vao, 4, 3, GL_FLOAT, false, offsetof(MeshVertex, bitangent));
// Integer attribute: the I-variant keeps the values as integers for a
// uvec4 input (requires GL 4.5 / ARB_direct_state_access).
glVertexArrayAttribIFormat(vao, 5, 4, GL_UNSIGNED_INT, offsetof(MeshVertex, ids));
glVertexArrayAttribFormat(vao, 6, 4, GL_FLOAT, false, offsetof(MeshVertex, weights));
for_i(7) { glVertexArrayAttribBinding(vao, i, 0); }
glBindVertexArray(0);
}
The following GLSL won't render anything.
#version 460 core
// Attribute layout mirrors the MeshVertex struct set up in Mesh::genFormat.
layout(location = 0) in vec3 Pos;
layout(location = 1) in vec3 Normal;
layout(location = 2) in vec2 Tex;
layout(location = 3) in vec3 Tan;
layout(location = 4) in vec3 BiTan;
// uvec4 input requires an integer attribute format on the C++ side
// (glVertexArrayAttribIFormat), otherwise the bits are reinterpreted.
layout(location = 5) in uvec4 BoneIds;
layout(location = 6) in vec4 Weights;
// NOTE(review): `normal` is declared but never written in this shader.
out vec3 normal;
out vec2 tex;
// Camera UBO at binding point 2, std140 layout.
layout(binding = 2, std140) uniform Camera
{
mat4 VP;
vec4 cpos;
};
uniform mat4 node;
uniform mat4 bones_inverse_bind_mesh_parent[50];
void main()
{
tex = Tex;
// Blend up to 4 bone matrices by weight; a zero first weight marks an
// unskinned vertex, which falls back to the plain node transform.
mat4 W = mat4(0.0f);
if (Weights[0] != 0.0f)
{
for (uint i = 0; i < 4; i++)
W = W + (Weights[i] * bones_inverse_bind_mesh_parent[BoneIds[i]]);
W = node * W;
}
else
W = node;
gl_Position = VP * W * vec4(Pos, 1.0);
}
Since BoneIds[i] is always zero, if I replace
W = W + (Weights[i] * bones_inverse_bind_mesh_parent[BoneIds[i]]);
with
W = W + (Weights[i] * bones_inverse_bind_mesh_parent[0]);
the result should be unchanged. My matrix transforms are currently a bit off (something to fix later), but now the cube renders fine. So there is something wrong with BoneIds. After bashing my head against the wall on this for a while, I instead replaced
layout(location = 5) in uvec4 BoneIds;
with
layout(location = 5) varying uvec4 BoneIds;
after seeing some old GLSL online, and now everything works. What I don't understand is why. I've seen plenty of GLSL code on the internet work with integer attributes using the in keyword.
UPDATE :
If I replace glVertexArrayAttribIFormat in Mesh::genFormat() with
glVertexArrayAttribFormat(vao, 5, 4, GL_UNSIGNED_INT, false, offsetof(MeshVertex, ids));
in C++ and
layout(location = 5) in vec4 BoneIds;
in GLSL and cast bone ids from float to int in the glsl code, the code also works.
Okay I solved the issue, even though I don't quite understand how this fixes the problem. My preferred graphics processor was on auto but when I forced it to use the NVIDIA processor over my integrated graphics, everything works out fine. image of solution
Update :
I think it is as simple as my Intel processor graphics supporting OpenGL 4.4 and glVertexArrayAttribIFormat came about in OpenGL 4.5.

Opengl: How to map a buffer CORRECTLY?

I'm new to stack overflow. I have the following problem: I want to have a rotating triangle in my window.
Now, I have already managed to have my code running and I had a rotating triangle. However, I wanted to rewrite the code for learning purposes, adding the two following major things:
Updating the buffer object later on with memcpy
Having an array-of-stuctures (AoS) instead of a structure-of-arrays (SoA)
I'm referring hereby to the "OpenGL Superbible" book.
I'll provide you some code snippets:
glGenVertexArrays(1, &vao);
static const vertex vertices[] = {
{ 0.25, -0.25, 0.5, 1.0 ,
1.0, 0.0, 0.0, 1.0},
{ -0.25, -0.25, 0.5, 1.0,
0.0, 1.0, 0.0, 1.0 },
{ 0.25, 0.25, 0.5, 1.0,
0.0, 0.0, 1.0, 1.0 }
};
glCreateBuffers(1, &buffer);
glNamedBufferStorage(buffer, sizeof(vertices), NULL, 0);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
void * ptr = glMapNamedBuffer(buffer, GL_WRITE_ONLY);
memcpy(ptr, vertices, sizeof(vertices));
glUnmapNamedBuffer(GL_ARRAY_BUFFER);
glVertexArrayVertexBuffer(vao, 0, buffer, 0, sizeof(vmath::vec4));
// Positions
glVertexArrayAttribBinding(vao, 0, 0);
glVertexArrayAttribFormat(vao, 0, 4, GL_FLOAT, GL_FALSE, offsetof(vertex, x));
glEnableVertexArrayAttrib(vao, 0);
// Color
glVertexArrayAttribBinding(vao, 1, 0);
glVertexArrayAttribFormat(vao, 1, 4, GL_FLOAT, GL_FALSE, offsetof(vertex, r));
glEnableVertexArrayAttrib(vao, 1);
glVertexArrayVertexBuffer(vao, 0, buffer, 0, sizeof(vertex));
I set up the vertex struct as follows:
// Interleaved array-of-structures vertex: position (x..w) followed by
// color (r..a). Attribute 0 reads from offsetof(vertex, x), attribute 1
// from offsetof(vertex, r); the vertex-buffer binding stride must be
// sizeof(vertex) (8 floats), not sizeof(vmath::vec4).
struct vertex {
// Position
float x;
float y;
float z;
float w;
// Color
float r;
float g;
float b;
float a;
};
The first time, I had the color hard-coded in my vertex shader. And I had the position data in a data-array. I set the data directly by calling 'glNamedBufferStorage' instead of (as is now the case) passing NULL. Back then, it worked. But when I changed those two things, it stopped working. I know for sure that both of these major steps include some errors.
Here I'll provide you with the vertex shader. the mvp matrix works, by the way, so that's not the problem.
#version 420 core
// Locations match the VAO setup: 0 = position, 1 = color.
layout (location = 0) in vec4 position;
layout (location = 1) in vec4 color;
// Interpolated color handed to the fragment shader.
out vec4 vs_color;
uniform mat4 mvp;
void main(void)
{
gl_Position = mvp * position;
vs_color = color;
}
Any hints would be greatly appreciated.
There are several problems in the code:
glUnmapNamedBuffer takes the buffer handle as parameter, not a GLenum. Change glUnmapNamedBuffer(GL_ARRAY_BUFFER); to glUnmapNamedBuffer(buffer);. (And you should check the return value, it returns false when there is a problem).
The stride for glVertexArrayVertexBuffer is wrong. Each vertex starts 8 floats after the previous one (or sizeof(vertex)). Change
glVertexArrayVertexBuffer(vao, 0, buffer, 0, sizeof(vmath::vec4));
to
glVertexArrayVertexBuffer(vao, 0, buffer, 0, sizeof(vertex));

OpenGL - Apply transformation to polygon in 3D space

I am trying to rotate a quad in a 3D space. The following code shows the vertex shader utilized to draw the quad:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
out vec3 ourColor;
uniform mat4 transform;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
// NOTE(review): transform is applied AFTER projection, i.e. in clip
// space. The conventional order is projection*view*model*transform;
// a scale/rotate applied post-projection is rarely what is intended.
gl_Position = transform*(projection*view*model*vec4(aPos, 1.0f));
ourColor = aColor;
}
The quad is displayed when transform is not multiplied to projection*view*model*vec4(aPos,1.0f) but is not displayed when it is multiplied as above.
The code for transformation:
trans=glm::rotate(trans,(float)(glfwGetTime()),glm::vec3(0.0,0.0,1.0));
float scaleAmount = sin(j*0.3);j=j+0.035;
trans=glm::scale(trans,glm::vec3(scaleAmount,scaleAmount,scaleAmount));
unsigned int transformLoc = glGetUniformLocation(shaderProgram, "transform");
glUniformMatrix4fv(transformLoc, 1, GL_FALSE, glm::value_ptr(trans));
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
I have set the uniform present in the vertex shader as well. Why is it not rotating and scaling, or even appearing, when I multiply transform with (projection*view*model*vec4(aPos,1.0f))?
Edit: I figured out that the problem is with scaling, since the code works with rotation only. The code does not work with scaling only.
Let's think only in 2D.
The quad is defined in "world" coordinates. To rotate it around some point move the quad to that point, then rotate and scale it and then move it back. Doing this with matrices is the same as transform * model where transform is something like
transform = moveback * scale * rotate * movetopoint
If scaleAmount == 0.0:
glm::mat4 trans( 1.0f );
float scaleAmount = 0.0f;
trans=glm::scale(trans,glm::vec3(scaleAmount,scaleAmount,scaleAmount));
then this would cause that trans is
{{0, 0, 0, 0}, {0, 0, 0, 0}, {0, 0, 0, 0}, {0, 0, 0, 1}}
Since sin(0.0) == 0.0 it has to be ensured that in case of sin(j*0.3);, j is not equal 0.0.

OpenGL vertex shader: weird matrix translation

I'm trying to move a triangle based on time using a matrix. But it does some weird stuff:
What it should do:
move on the x-axis
What it does:
The top point of the triangle is fixed and the other points seem to move around it in a circular movement and scale on the x, z axis (I'm still in 2d so I don't have depth).
My C++ Code:
...
GLfloat timeValue = glfwGetTime();
GLfloat offset = (sin(timeValue * 4) / 2);
GLfloat matrix[16] = {
1, 0, 0, offset,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
};
GLuint uniform_m_transform = glGetUniformLocation(shader_program, "m_transform");
glUniformMatrix4fv(uniform_m_transform, 1, GL_FALSE, matrix);
...
My vertex shader:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 color;
out vec3 ourColor;
// The C++ side fills this from a row-major float[16] (translation in
// the last column of the first row), so the upload must pass GL_TRUE
// for the transpose parameter of glUniformMatrix4fv — OpenGL expects
// column-major data by default.
uniform mat4 m_transform;
void main()
{
ourColor = color;
gl_Position = m_transform * vec4(position, 1.0);
}
I don't know what I did wrong, according to the tutorial the matrix attribute I've set to offset should change the x-translation.
Do you know what's my mistake?
you are providing a row-major matrix, so you need to specify the transpose:
glUniformMatrix4fv(uniform_m_transform, 1, GL_TRUE, matrix);
Reference: glUniform, check the transpose parameter.

can't draw any other objects before or after drawing particles

I am working on a game, and trying to implement the instancized CPU-Particle System programmed on http://www.opengl-tutorial.org/intermediate-tutorials/billboards-particles/particles-instancing/
i managed to get it working in my code structure, but i am trying to draw other objects in the same window, which i can't, i have tested it, and it only allows me to draw one, either draw the particle system or draw the object i want.
The problem happens specifically at this code part :
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Use our shader
glUseProgram(particleprogramID->programHandle);
unit2 +=1;
glActiveTexture(GL_TEXTURE0 + unit2);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(TextureID, unit2);
glm::mat4 ViewMatrix = camera->getViewMatrix();
// Same as the billboards tutorial
glUniform3f(CameraRight_worldspace_ID, ViewMatrix[0][0], ViewMatrix[1][0], ViewMatrix[2][0]);
glUniform3f(CameraUp_worldspace_ID , ViewMatrix[0][1], ViewMatrix[1][1], ViewMatrix[2][1]);
glUniformMatrix4fv(ViewProjMatrixID, 1, GL_FALSE, &mvp[0][0]);
//glUniformMatrix4fv(modviewprojID, 1, GL_FALSE, &mvp[0][0]);
//1rst attribute buffer : vertices
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, billboard_vertex_buffer);
glVertexAttribPointer(
0,
3,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
// 2nd attribute buffer : positions of particles' centers
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, particles_position_buffer);
glVertexAttribPointer(
1,
4,
GL_FLOAT,
GL_FALSE,
0,
(void*)0
);
// 3rd attribute buffer : particles' colors
glEnableVertexAttribArray(2);
glBindBuffer(GL_ARRAY_BUFFER, particles_color_buffer);
glVertexAttribPointer(
2,
4,
GL_UNSIGNED_BYTE,
GL_TRUE,
0,
(void*)0
);
glVertexAttribDivisor(0, 0);
glVertexAttribDivisor(1, 1);
glVertexAttribDivisor(2, 1);
glDrawArraysInstanced(GL_TRIANGLE_STRIP, 0, 4, ParticlesCount);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
then i try to draw my star:
unit2 += 1;
starTexture->Bind(unit2);
shaderObject ->useShader();
glUniform1i(glGetUniformLocation(shaderObject->programHandle, "colorTexture"), unit2);
glUniformMatrix4fv(glGetUniformLocation(shaderObject->programHandle, "modelMatrix"), 1, GL_FALSE, glm::value_ptr(star1->getModelMatrix()));
glUniformMatrix4fv(glGetUniformLocation(shaderObject->programHandle, "projectionMatrix"), 1, GL_FALSE, glm::value_ptr(projectionViewMatrix));
star1->draw();
the vertex and fragment shader for the particle system:
#version 330 core
// Input vertex data, different for all executions of this shader.
layout(location = 0) in vec3 squareVertices; // corner of the unit billboard quad (divisor 0)
layout(location = 1) in vec4 xyzs; // xyz = center of the particle, w = size of the square (instanced, divisor 1)
layout(location = 2) in vec4 color; // per-particle color (instanced, divisor 1)
// Output data ; will be interpolated for each fragment.
out vec2 UV;
out vec4 particlecolor;
// Values that stay constant for the whole mesh.
uniform vec3 CameraRight_worldspace;
uniform vec3 CameraUp_worldspace;
uniform mat4 VP; // View-Projection matrix, without the Model part (the position is in xyzs; the orientation always faces the camera)
void main()
{
float particleSize = xyzs.w; // because we encoded it this way.
vec3 particleCenter_wordspace = xyzs.xyz;
// Billboarding: expand the quad corner along the camera's right/up
// axes so the particle always faces the viewer.
vec3 vertexPosition_worldspace =
particleCenter_wordspace
+ CameraRight_worldspace * squareVertices.x * particleSize
+ CameraUp_worldspace * squareVertices.y * particleSize;
// Output position of the vertex
gl_Position = VP * vec4(vertexPosition_worldspace, 1.0f);
// UV of the vertex: remap the quad corners [-0.5, 0.5] to [0, 1].
UV = squareVertices.xy + vec2(0.5, 0.5);
particlecolor = color;
}
fragment shader:
#version 330 core
// Interpolated values from the vertex shader
in vec2 UV;
in vec4 particlecolor;
// Output data
out vec4 color;
uniform sampler2D myTexture;
void main(){
// Output color = texture sample at the specified UV, tinted by the
// per-particle color. texture() replaces texture2D(), which was
// removed from the GLSL 3.30 core profile.
color = texture( myTexture, UV ) * particlecolor;
}
and it only displays the particle system:
worth mentioning is:
the object i want to draw is a star modelled in blender and is displayed correctly when drawn alone or with other objects other than the particle system. and has its own class having buffers for psitions, UVs, indices and normals...
it seems like the star data are being swallowed by the buffer...
i appreciate every help...