How to send data to a vertex shader with OpenGL? - C++

I've got a problem sending data to my vertex shader:
#version 450 core
layout (location = 5) in vec3 in_Vertex;
layout (location = 6) in vec3 in_Color;
in int gl_InstanceID;
layout (location = 0) uniform mat4 projection;
layout (location = 1) uniform mat4 modelview;
layout (location = 2) uniform float posX;
layout (location = 3) uniform float posY;
layout (location = 4) uniform uint posZ;
out vec4 v_color;
void main()
{
float tmpX = posX;
float tmpY = posY;
uint tmpZ = posZ;
gl_Position = projection * modelview * vec4(in_Vertex.x + tmpX*2, in_Vertex.y + tmpY*2, in_Vertex.z + tmpZ, 1.0) + vec4(in_Color, 0.0);
v_color = vec4(in_Color,1.0);
}
The in_Vertex attribute works fine, but in_Color seems to receive bad values. I send the data as:
glUseProgram(m_shaderPerso.getID());
glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
glVertexAttribPointer(5, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(5);
glVertexAttribPointer(6, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(m_offsetColor));
glEnableVertexAttribArray(6);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_ibo);
glUniformMatrix4fv(0, 1, GL_FALSE, glm::value_ptr(projection));
glUniformMatrix4fv(1, 1, GL_FALSE, glm::value_ptr(modelview));
for( auto &x : m_mapPos)
{
glUniform1f(2, x.second.getX());
glUniform1f(3, x.second.getY());
glUniform1ui(4, s_map->getHauteur(x.second.getXint(),x.second.getYint()));
glDrawElements(GL_TRIANGLES, m_tailleI, GL_UNSIGNED_BYTE, 0);
}
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glDisableVertexAttribArray(6);
glDisableVertexAttribArray(5);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glUseProgram(m_shaderPerso.getID());
And I initialize the VBO with:
glGenBuffers(1, &m_vbo);
glBindBuffer(GL_ARRAY_BUFFER, m_vbo); //Bind the VBO
glBufferData(GL_ARRAY_BUFFER, (tailleV * sizeof(float)) + (m_tailleI*3 * sizeof(float)), 0, GL_STATIC_DRAW); //Allocate memory
glBufferSubData(GL_ARRAY_BUFFER, 0, tailleV * sizeof(float), m_vertices); //Transfer vertex data
glBufferSubData(GL_ARRAY_BUFFER, tailleV * sizeof(float), m_tailleI*3 * sizeof(float), m_color); //Transfer color data
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &m_ibo); //Generate the IBO (indices)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_ibo); //Bind the IBO
glBufferData(GL_ELEMENT_ARRAY_BUFFER, m_tailleI * sizeof(GLubyte), 0, GL_STATIC_DRAW); //Allocate memory
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, m_tailleI * sizeof(GLubyte), m_indices); //Transfer index data
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
I really don't know where my mistake is, so I've given you all the code that could be useful. I do know that the arrays m_vertices, m_color and m_indices are correctly initialized, and the vertices are displayed correctly, so the vertex and index data in the VBO are fine in VRAM. I probably made a mistake when I sent the data, but I haven't been able to find it for two days, and I think a fresh look could help me.
Thanks for reading and probably helping :)

Besides what @HolyBlackCat said, you are mixing interleaved mode with batch mode. When you are filling your buffer, you are using batch mode:
glBindBuffer(GL_ARRAY_BUFFER, m_vbo); //Bind the VBO
glBufferData(GL_ARRAY_BUFFER, (tailleV * sizeof(float)) + (m_tailleI*3 * sizeof(float)), 0, GL_STATIC_DRAW); //Allocate memory
glBufferSubData(GL_ARRAY_BUFFER, 0, tailleV * sizeof(float), m_vertices); //Transfer vertex data
glBufferSubData(GL_ARRAY_BUFFER, tailleV * sizeof(float), m_tailleI*3 * sizeof(float), m_color); //Transfer color data
First you fill it with the vertex positions and then, at the end of the buffer, with the color attributes.
But when you are using the buffer, you are using it in interleaved mode*:
glVertexAttribPointer(5, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glVertexAttribPointer(6, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(m_offsetColor));
Choose one and stick with it.
*Also, in the glVertexAttribPointer calls the stride can have an incorrect value for the layout you choose.
You can probably solve your problem by changing:
glVertexAttribPointer(6, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(m_offsetColor))
to:
glVertexAttribPointer(6, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), BUFFER_OFFSET(tailleV * sizeof(float)))
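For reference, a minimal sketch of the batch-mode setup, assuming (as in the initialization code above) that tailleV is the number of floats in m_vertices and that the colors are stored immediately after the positions:
// Positions: tightly packed vec3s at the start of the buffer
glVertexAttribPointer(5, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), BUFFER_OFFSET(0));
glEnableVertexAttribArray(5);
// Colors: tightly packed vec3s stored after all the positions
glVertexAttribPointer(6, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), BUFFER_OFFSET(tailleV * sizeof(float)));
glEnableVertexAttribArray(6);
A stride of 0 would work just as well here, since the data within each block is tightly packed; the important part is that the offset of the color block matches where it was uploaded with glBufferSubData.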

Related

OpenGL displaying white screen while displaying multiple objects

I was trying to implement normal mapping on a simple cube, but since I had trouble with my normals I wanted to try to display them using a geometry shader. Following the learnopengl tutorial, it basically calls mesh.render() twice, the first time to draw the model and the second time to display the normals. When I try to do the same I get this:
The cube seems well drawn, but it has a weird white rectangle in front of it and I don't know why. I don't know if the problem is in the drawing or in the geometry shader, so I'll post both.
My code:
glutDisplayFunc(MyRenderScene);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glGenVertexArrays(1,&global.gVAO);
glGenBuffers(1, &global.VBO);
glBindVertexArray(global.VAO);
glBindBuffer(GL_ARRAY_BUFFER, global.VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenBuffers(1, &global.IBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.IBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glGenVertexArrays(1,&global.gVAO);
glGenBuffers(1, &global.gVBO);
glBindVertexArray(global.gVAO);
glBindBuffer(GL_ARRAY_BUFFER, global.gVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glGenBuffers(1, &global.gIBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.gIBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
Here I'm basically passing the same structure to both buffers.
The buffers are instantiated in a global struct.
This is MyRenderScene():
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
global.shaders.enable(); // glUseProgram
global.shaders.set_sampler(0); // setting textures
global.shaders.set_sampler(1);
global.sceneT.set_camera(
global.camera.position(),
global.camera.lookat(),
global.camera.up()
);
glm::mat4 model = glm::mat4(1.0f);
glm::vec3 vaxis = glm::vec3(0.0,1.0,0.0);
glm::vec3 haxis = glm::vec3(1.0,0.0,0.0);
model = glm::rotate(model,glm::radians(global.gradX),haxis);
model = glm::rotate(model,glm::radians(global.gradY),vaxis);
glm::mat4 projection = glm::perspective(glm::radians(40.0f), (float)global.WINDOW_WIDTH/(float)global.WINDOW_HEIGHT, 0.1f, 100.0f);
glm::mat4 view = glm::lookAt(global.camera.position(),global.camera.lookat()+ global.camera.position(),global.camera.up());
global.shaders.set_model(model);
global.shaders.set_view(view);
global.shaders.set_projection(projection);
global.shaders.set_viewPos(global.camera.position());
global.shaders.set_lightPos(lightPos);
global.shaders.update_uniforms();
glBindVertexArray(global.VAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, position)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, textcoord)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, normal)));
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, tangent)));
glBindBuffer(GL_ARRAY_BUFFER, global.VBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.IBO);
global.brickwall.Bind(GL_TEXTURE0+0); // binding textures
global.brickwall_normals.Bind(GL_TEXTURE0+1);
glDrawElements(GL_TRIANGLES,36,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
global.geometryShader.enable(); // setting up geometry shader
global.geometryShader.set_projection(projection);
global.geometryShader.set_model(model);
global.geometryShader.set_view(view);
global.geometryShader.update_uniforms();
glBindVertexArray(global.gVAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, position)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, textcoord)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, normal)));
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex),
reinterpret_cast<GLvoid*>(offsetof(struct Vertex, tangent)));
glBindBuffer(GL_ARRAY_BUFFER, global.gVBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, global.gIBO);
glDrawElements(GL_TRIANGLES,36,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
glutSwapBuffers();
glutPostRedisplay();
I even tried using the same vertex arrays and vertex buffers for both passes, but I get the same result.
Here is the vertex shader:
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 textcoord;
layout (location = 2) in vec3 normal;
layout (location = 3) in vec3 tangent;
out VS_OUT {
vec3 newNormal;
} vs_out;
uniform mat4 view;
uniform mat4 model;
void main() {
mat3 normalMatrix = mat3(transpose(inverse(view * model)));
newNormal = vec3(vec4(normalMatrix * normal, 0.0));
gl_Position = view * model * vec4(position, 1.0);
}
And geometryShader:
#version 330 core
layout (triangles) in;
layout (line_strip, max_vertices = 6) out;
in VS_OUT {
vec3 newNormal;
} gs_in[];
const float MAGNITUDE = 0.2;
uniform mat4 projection;
void GenerateLine(int index) {
gl_Position = projection * gl_in[index].gl_Position;
EmitVertex();
gl_Position = projection * (gl_in[index].gl_Position + vec4(gs_in[index].newNormal,0.0) * MAGNITUDE);
EmitVertex();
EndPrimitive();
}
void main() {
GenerateLine(0); // first vertex normal
GenerateLine(1); // second vertex normal
GenerateLine(2); // third vertex normal
}
Feel free to correct me on everything possible and imaginable.
The reason for the issue is that the shader program with the geometry shader fails to compile or link. Hence the geometry is drawn by the default shader program instead of the program with the geometry shader.
There is (at least) one error in the vertex shader: newNormal is declared inside the interface block vs_out, so the line
newNormal = vec3(vec4(normalMatrix * normal, 0.0));
has to be
vs_out.newNormal = vec3(vec4(normalMatrix * normal, 0.0));
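To confirm this, query the link status and the info log after glLinkProgram. A minimal sketch, assuming program holds the program object's id (the variable name is illustrative):
GLint linked = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if (linked != GL_TRUE) {
    GLchar infoLog[1024];
    glGetProgramInfoLog(program, sizeof(infoLog), nullptr, infoLog);
    std::cerr << "program link failed: " << infoLog << std::endl; // needs <iostream>
}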

OpenGL 3.1 GLSL 140 Outputting White In Fragment Shader in C++ [closed]

I am stuck on a laptop with no support for OpenGL 3.3 or anything higher than 3.1. This is problematic as I am not entirely familiar with this version, so I am learning how to use things again. My problem is that, for some reason, my fragment shader only outputs white. I'm not quite sure what's wrong, but I get the feeling that it has something to do with the way I set the VBO data, as this has recently changed in my code. Right now, I'm trying to convert my old rendering engine (written in OpenGL 3.3) to OpenGL 3.1 because of the limitations of my old laptop. This is why I am using a struct for the Vertex which includes BiTangent and Tangent values.
My Code:
struct Vertex {
// Position
glm::vec3 Position;
// Normals
glm::vec3 Normal;
// Texture Coordinates
glm::vec2 TexCoords;
// BiTangent
glm::vec3 BiTangent;
// Tangent
glm::vec3 Tangent;
};
struct Texture {
unsigned int id;
int number;
};
class Mesh {
std::vector<Vertex> vertices;
std::vector<unsigned int> indices;
std::vector<Texture> textures;
unsigned int VAO, VBO[5], EBO;
public:
Mesh(std::vector<Vertex> vertices, std::vector<unsigned int> indices, std::vector<Texture> textures) {
this->vertices = vertices;
this->indices = indices;
this->textures = textures;
setupMesh();
};
void setupMesh() {
glGenVertexArrays(1, &VAO);
glGenBuffers(4, VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
int stride = 14;
// Position
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// Normal
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
glEnableVertexAttribArray(1);
// Texture Coordinates
glBindBuffer(GL_ARRAY_BUFFER, VBO[2]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
glEnableVertexAttribArray(2);
// BiTangent
glBindBuffer(GL_ARRAY_BUFFER, VBO[3]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)8);
glEnableVertexAttribArray(3);
// Tangent
glBindBuffer(GL_ARRAY_BUFFER, VBO[4]);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(Vertex), &vertices[0], GL_STATIC_DRAW);
glVertexAttribPointer(4, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)11);
glEnableVertexAttribArray(4);
// Element Buffer Object
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
// Unbind
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
};
void Draw(ShaderProgram shader) {
for (int i = 0; i < textures.size(); i++) {
// Give textures the proper id and bind them
glActiveTexture(GL_TEXTURE0 + textures[i].number);
glBindTexture(GL_TEXTURE_2D, textures[i].id);
}
// Draw
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, indices.size(), GL_UNSIGNED_INT, 0);
// Unbind
glBindVertexArray(0);
}
};
Vertex Shader:
#version 140 core
in vec3 aPos;
in vec3 aColor;
in vec2 aTexCoords;
in vec3 aBiTangent;
in vec3 aTangent;
out vec3 Color;
void main(void){
Color = vec3(aColor);
gl_Position = vec4(aPos, 1.0);
}
Fragment Shader:
#version 140 core
out vec4 FragColor;
//precision highp float;
in vec3 Color;
void main(void)
{
FragColor = vec4(1.0, 0.8, 0.8, 1.0);
}
I tried manually setting a color to override displaying the normals, but the output is still white.
EDIT:
If I can generate one VBO instead of 4, that would be useful. My question for that is how I'm supposed to access the vertex data from the shaders when using one VBO.
If a named buffer object is bound, then the last parameter of glVertexAttribPointer is treated as a byte offset into the buffer object's data store.
Hence the offset has to be sizeof(float)*number rather than number:
For instance, change
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)3);
to
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE,
stride * sizeof(float), (void*)(sizeof(float) * 3));
and
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, stride * sizeof(float), (void*)6);
to
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE,
stride * sizeof(float), (void*)(sizeof(float) * 6));
...
Furthermore, the version specification is invalid. There is no GLSL 1.40 core version, so remove the token core in both the vertex and the fragment shader:
#version 140 core
becomes
#version 140
The core qualifier was introduced in OpenGL Shading Language 1.50.
I recommend checking whether the shader compilation succeeded and whether the program object linked successfully. See Shader Compilation.
Whether a shader compiled successfully can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS; whether a program linked successfully can be checked with glGetProgramiv and the parameter GL_LINK_STATUS.
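A minimal sketch of such a check, assuming shader holds the id returned by glCreateShader and program the id returned by glCreateProgram (both names are illustrative):
GLint status = GL_FALSE;
GLchar infoLog[1024];
// after glCompileShader(shader)
glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
    glGetShaderInfoLog(shader, sizeof(infoLog), nullptr, infoLog);
    std::cerr << "compile error: " << infoLog << std::endl; // needs <iostream>
}
// after glLinkProgram(program)
glGetProgramiv(program, GL_LINK_STATUS, &status);
if (status != GL_TRUE) {
    glGetProgramInfoLog(program, sizeof(infoLog), nullptr, infoLog);
    std::cerr << "link error: " << infoLog << std::endl;
}
With the invalid #version 140 core directive, this check would report the compile failure and its cause.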

Adding line to shader makes nothing draw

I have this vertex shader. When I remove in vec3 LVertexNorm and everything related to it, it renders fine. But if I add it back in, nothing renders anymore.
#version 140
in vec3 LVertexNorm;
in vec3 LVertexPos2D;
uniform mat4 MVP;
out vec3 norm;
void main() {
norm = LVertexNorm;
gl_Position = MVP * vec4( LVertexPos2D.x, LVertexPos2D.y, LVertexPos2D.z, 1 );
}
Fragment shader
#version 140
in vec3 norm;
out vec4 LFragment;
void main() {
LFragment = vec4( 1.0,1.0,1.0, 1.0 );
}
And code for building the VAO
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, verticesCount * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(program->getAttribute("LVertexPos2D"), 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, nbo);
glBufferData(GL_ARRAY_BUFFER, normalCount * sizeof(GLfloat), normals, GL_STATIC_DRAW);
glVertexAttribPointer(program->getAttribute("LVertexNorm"), 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indicesCount * sizeof(GLuint), indices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBindVertexArray(0);
I tried different ways, always with the same result: when LVertexNorm gets added, the shader stops working. I can't figure out why. What might be wrong?
The argument to glEnableVertexAttribArray has to be the vertex attribute index:
GLuint pos_inx = program->getAttribute("LVertexPos2D");
glVertexAttribPointer(pos_inx, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(pos_inx);
GLuint norm_inx = program->getAttribute("LVertexNorm");
glVertexAttribPointer(norm_inx, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), NULL);
glEnableVertexAttribArray(norm_inx);
When you have the vertex shader input variables in vec3 LVertexNorm; and in vec3 LVertexPos2D;, then possibly LVertexNorm gets the attribute index 0 and LVertexPos2D gets the attribute index 1. Since the vertex attribute 1 is not enabled, the vertex positions are not specified.
In fact, the attribute indexes are not specified and can be any number. Most drivers, however, use ascending indexes that start at 0.
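If you want the indexes to be predictable, one option (a sketch, assuming program is the program object's id and has not been linked yet) is to bind them explicitly before linking:
glBindAttribLocation(program, 0, "LVertexPos2D");
glBindAttribLocation(program, 1, "LVertexNorm");
glLinkProgram(program);
After that, hard-coded indexes 0 and 1 in glVertexAttribPointer and glEnableVertexAttribArray are guaranteed to match the shader inputs.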

Sending single unsigned int to VBO

I am trying to send a single unsigned int per vertex to a VBO, but when I test its value in the vertex shader (it should be 1), the value is different from the expected one.
The variable that contains my unsigned ints is textureIds.
The code that loads the VBO:
std::vector<unsigned int> textureIds;
glGenBuffers(1, &vboID_m);
glBindBuffer(GL_ARRAY_BUFFER, vboID_m);
{
glBufferData(GL_ARRAY_BUFFER,
(vertices.size() + texture.size() + normals.size())
* sizeof(float) + textureIds.size() * sizeof(unsigned int), 0,
GL_STATIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, vertices.size() * sizeof(float),
vertices.data());
glBufferSubData(GL_ARRAY_BUFFER, vertices.size() * sizeof(float),
texture.size() * sizeof(float), texture.data());
glBufferSubData(GL_ARRAY_BUFFER,
(vertices.size() + texture.size()) * sizeof(float),
normals.size() * sizeof(float), normals.data());
glBufferSubData(GL_ARRAY_BUFFER,
(vertices.size() + texture.size() + normals.size()) * sizeof(float),
textureIds.size() * sizeof(unsigned int), textureIds.data());
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
The code that draws the VBO:
void Chunck::draw() const {
glBindBuffer(GL_ARRAY_BUFFER, vboID_m);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float),
BUFFER_OFFSET(0));
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float),
BUFFER_OFFSET(verticeSize_m * sizeof(float)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float),
BUFFER_OFFSET(
(verticeSize_m + textureSize_m) * sizeof(float)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(3, 1, GL_UNSIGNED_INT, GL_FALSE, sizeof(unsigned int),
BUFFER_OFFSET(
(verticeSize_m + textureSize_m + normalSize_m) * sizeof(float)));
glEnableVertexAttribArray(3);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, grassTexture_m.getID());
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, dirtTexture_m.getID());
glDrawArrays(GL_TRIANGLES, 0, verticeSize_m / 3);
glBindTexture(GL_TEXTURE_2D, 0);
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(1);
glDisableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
The vertex shader:
#version 330 core
in vec3 in_Vertex;
in vec2 in_TexCoord0;
in vec3 in_normal;
in int in_textureId;
uniform mat4 projection;
uniform mat4 model;
uniform mat4 view;
out vec2 coordTexture;
out vec3 normal;
out vec3 FragPos;
out vec3 rayDir;
out float grassAmount;
out float dirtAmount;
void main()
{
vec4 pos = model * vec4(in_Vertex, 1.0);
vec2 mu = vec2(0., 0.);
//pos.y = max(pos.y, 10 * exp(-((pos.x - mu.x) * (pos.x - mu.x) + (pos.z - mu.y) * (pos.z - mu.y)) / 100.));
gl_Position = projection * view * pos;
FragPos = vec3(pos);
coordTexture = in_TexCoord0;
normal = in_normal;
rayDir = (view * model * vec4(FragPos, 1.)).xyz;
grassAmount = 0;
dirtAmount = 0;
if (in_textureId == 0)
grassAmount = 1;
else if (in_textureId == 1)
dirtAmount = 1;
}
Execution should enter the second if, but it doesn't :\
You have to use glVertexAttribIPointer (focus on I), when defining the array of generic vertex attribute data, for the vertex attribute in int in_textureId;.
Vertex attribute data defined by glVertexAttribPointer will be converted to floating point.
See OpenGL 4.6 API Core Profile Specification; 10.2. CURRENT VERTEX ATTRIBUTE VALUES; page 344
The VertexAttribI* commands specify signed or unsigned fixed-point values
that are stored as signed or unsigned integers, respectively. Such values are referred to as pure integers.
...
All other VertexAttrib* commands specify values that are converted directly to the internal floating-point representation.
Note, you should either use a layout qualifier to specify the attribute index in the vertex shader:
layout(location = 0) in vec3 in_Vertex;
layout(location = 1) in vec2 in_TexCoord0;
layout(location = 2) in vec3 in_normal;
layout(location = 3) in int in_textureId;
or query the attribute index with glGetAttribLocation after the program has been linked:
e.g.:
GLuint progObj = ...;
glLinkProgram( progObj );
GLint texIdInx = glGetAttribLocation( progObj, "in_textureId" );
glVertexAttribIPointer(
texIdInx, 1, GL_UNSIGNED_INT, sizeof(unsigned int),
BUFFER_OFFSET((verticeSize_m + textureSize_m + normalSize_m) * sizeof(float)));
glEnableVertexAttribArray(texIdInx );
Edit:
Of course, glBindAttribLocation is a proper solution too.
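A sketch of that variant, using the names from the question and the hard-coded index 3 from the draw code (the binding has to happen before glLinkProgram):
glBindAttribLocation(progObj, 3, "in_textureId");
glLinkProgram(progObj);
...
glVertexAttribIPointer(3, 1, GL_UNSIGNED_INT, sizeof(unsigned int),
    BUFFER_OFFSET((verticeSize_m + textureSize_m + normalSize_m) * sizeof(float)));
glEnableVertexAttribArray(3);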

OpenGL Transform Feedback not returning anything

I am working on a project to create geometry by using a geometry shader and transform feedback.
I am currently trying to have the geometry shader return its input (one triangle) without changing/adding anything, but it's not working.
I would appreciate any help/advice I can get. Here are parts of my code:
The creation of the Program for Transform Feedback:
//==========================================
// Create the Transform Program
//==========================================
int check = LoadShader("Shaders//transformVS.glsl", GL_VERTEX_SHADER, transformVS);
//TODO: check for fail
check = LoadShader("Shaders//transformGS.glsl", GL_GEOMETRY_SHADER, transformGS);
//TODO: check for fail
transformProgram = glCreateProgram();
glAttachShader(transformProgram, transformVS);
glAttachShader(transformProgram, transformGS);
glBindAttribLocation(transformProgram, 0, "position_in");
glBindAttribLocation(transformProgram, 1, "normal_in");
glBindAttribLocation(transformProgram, 2, "length_in");
static const char* varyings[] = { "position_out", "normal_out", "length_out" };
glTransformFeedbackVaryings(transformProgram, 3, varyings, GL_INTERLEAVED_ATTRIBS);
glLinkProgram(transformProgram);
if (CheckProgram(transformProgram) == -1){
glDetachShader(transformProgram, transformVS);
glDetachShader(transformProgram, transformGS);
glDeleteShader(transformVS);
transformVS = 0;
glDeleteShader(transformGS);
transformGS = 0;
glDeleteProgram(transformProgram);
transformProgram = 0;
return -1;
}
error = glGetError();
The Creation of the VBOs, VAOs and TFOs:
//=====================================
// Create VBOs
//=====================================
glGenBuffers(2, VBOID);
glBindBuffer(GL_ARRAY_BUFFER, VBOID[0]);
glBufferData(GL_ARRAY_BUFFER, 3 * sizeof(TVertex_VNL), vertices, GL_DYNAMIC_COPY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
error = glGetError();
glBindBuffer(GL_ARRAY_BUFFER, VBOID[1]);
glBufferData(GL_ARRAY_BUFFER, 3 * sizeof(TVertex_VNL), NULL, GL_DYNAMIC_COPY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
error = glGetError();
//=====================================
// Create VAOs
//=====================================
glGenVertexArrays(2, VAOID);
glBindVertexArray(VAOID[0]);
glBindBuffer(GL_ARRAY_BUFFER, VBOID[0]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(0)); //position
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(sizeof(float) * 3)); //normal
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(sizeof(float) * 6)); //length
glBindBuffer(GL_ARRAY_BUFFER, 0);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glBindVertexArray(0);
error = glGetError();
glBindVertexArray(VAOID[1]);
glBindBuffer(GL_ARRAY_BUFFER, VBOID[1]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(0)); //position
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(sizeof(float) * 3)); //normal
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, sizeof(TVertex_VNL), BUFFER_OFFSET(sizeof(float) * 6)); //length
glBindBuffer(GL_ARRAY_BUFFER, 0);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glBindVertexArray(0);
error = glGetError();
//=====================================
// Create TFOs
//=====================================
glGenTransformFeedbacks(2, TFOID);
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, TFOID[0]);
glBindBufferBase(GL_TRANSFORM_FEEDBACK_BUFFER, 0, VBOID[0]);
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, 0);
error = glGetError();
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, TFOID[1]);
glBindBufferBase(GL_TRANSFORM_FEEDBACK_BUFFER, 0, VBOID[1]);
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, 0);
error = glGetError();
The render Method:
//=========================================
// Clear Screen
//=========================================
//Clear all the buffers
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
//========================================
// Transform Feedback
//========================================
glEnable(GL_RASTERIZER_DISCARD);
glUseProgram(transformProgram);
glBindVertexArray(VAOID[0]);
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, TFOID[1]);
glBeginTransformFeedback(GL_TRIANGLES);
glDrawArrays(GL_TRIANGLES, 0, 3);
glEndTransformFeedback();
glBindTransformFeedback(GL_TRANSFORM_FEEDBACK, 0);
glDisable(GL_RASTERIZER_DISCARD);
//========================================
// Draw Triangle
//========================================
//Bind the shader that we want to use
glUseProgram(renderProgram);
//Setup all uniforms for your shader
glUniformMatrix4fv(renderMVP, 1, FALSE, &MVP[0][0]);
//Bind the VAO
glBindVertexArray(VAOID[1]);
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawTransformFeedback(GL_TRIANGLES, TFOID[1]);
//========================================
// Swap Buffers
//========================================
glutSwapBuffers();
The Vertex Shader:
#version 330
in vec3 position_in;
in vec3 normal_in;
in float length_in;
out vec3 vs_position;
out vec3 vs_normal;
out float vs_length;
void main()
{
vs_position = position_in;
vs_normal = normal_in;
vs_length = length_in;
}
The Geometry Shader:
#version 330
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
in vec3 vs_position[];
in vec3 vs_normal[];
in float vs_length[];
out vec3 position_out;
out vec3 normal_out;
out float length_out;
void main()
{
for(int i = 0; i < 3; i++){
position_out = vs_position[i];
normal_out = vs_normal[i];
length_out = vs_length[i];
EmitVertex();
}
EndPrimitive();
}
Your geometry shader is not emitting any vertices because the for loop body is never entered:
for(int i = 0; i >= 3; i++){
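The condition is false on the very first iteration, so EmitVertex() is never reached. Changing it to match the listing shown above fixes it:
for(int i = 0; i < 3; i++){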