I can successfully draw a scene with glDrawArrays, which looks like this:
This technique is a bit slow, so I decided to build an index buffer and use glDrawElements instead. The result of that looks like this:
As you can probably see, the squares at the top right are rendered incorrectly and the square below the airplane has disappeared.
The code for generating the buffers:
//create vertex and index buffer
glGenBuffers(1, &gVertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, gVertexBuffer);
glGenBuffers(1, &gIndexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, gIndexBuffer);
// Define the size of the buffers
GLuint floatAmount = 0;
GLuint GLuintAmount = 0;
for (MeshObject* mesh : meshes)
{
floatAmount += mesh->GetFloatAmount();
GLuintAmount += mesh->GetGLuintAmount();
}
glBufferData(GL_ARRAY_BUFFER, floatAmount, 0, GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, GLuintAmount, 0, GL_STATIC_DRAW);
// Define size and offset of the different subdata in the buffers
GLuint offsetVer = 0;
GLuint offsetInd = 0;
for (MeshObject* mesh : meshes)
{
// Set offset for mesh
mesh->SetOffset(offsetVer / sizeof(Point));
mesh->SetOffsetInd(offsetInd);
glBufferSubData(GL_ARRAY_BUFFER,
offsetVer,
mesh->GetFloatAmount(),
mesh->GetPoints().data());
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER,
offsetInd,
mesh->GetGLuintAmount(),
mesh->GetIndicies().data());
offsetVer += mesh->GetFloatAmount();
offsetInd += mesh->GetGLuintAmount();
}
...and the code for rendering:
glBindBuffer(GL_ARRAY_BUFFER, gVertexBuffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, gIndexBuffer);
mat4 vwMatrix = localCamera->GetPVMatrix() * mh->GetWorld();
glUniformMatrix4fv(projectionviewworldMatrixUniformLocation, 1, GL_FALSE, &vwMatrix[0][0]);
//glDrawArrays(GL_TRIANGLES, mh->mesh->GetOffset(), mh->mesh->GetPoints().size());
glDrawElements(GL_TRIANGLES, mh->mesh->GetIndicies().size(), GL_UNSIGNED_INT, (void*)mh->mesh->GetOffsetInd());
//GLuint size = mh->mesh->GetIndicies().size();
//GLuint IndSize = mh->mesh->GetOffsetInd();
//glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, (void*)IndSize);
You need to add each mesh's vertex offset (the number of vertices already stored in the combined vertex buffer) to every element of its index data. Here is an example:
Mesh A ( 1 triangle)
Vertices: v0 v1 v2
Indices: 0 1 2
Mesh B ( 1 triangle)
Vertices: v3 v4 v5
Indices: 0 1 2
This is what your combined buffer currently looks like:
Vertices: v0 v1 v2 v3 v4 v5
Indices: 0 1 2 0 1 2
This is how it should be:
Vertices: v0 v1 v2 v3 v4 v5
Indices: 0 1 2 3 4 5
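A minimal sketch of how the element-buffer upload loop could rebase the indices before copying them, assuming GetIndicies() returns a std::vector<GLuint> and GetGLuintAmount() is the byte size of that index data (the temporary vector is my own addition):
GLuint baseVertex = 0;  // number of vertices already stored in the combined vertex buffer
for (MeshObject* mesh : meshes)
{
    // Copy this mesh's indices and shift them past the vertices uploaded so far.
    std::vector<GLuint> rebased = mesh->GetIndicies();
    for (GLuint& index : rebased)
        index += baseVertex;
    glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, offsetInd,
                    mesh->GetGLuintAmount(), rebased.data());
    baseVertex += mesh->GetPoints().size();   // vertices contributed by this mesh
    offsetInd  += mesh->GetGLuintAmount();
}
Alternatively, on a GL 3.2+ context, glDrawElementsBaseVertex lets you keep each mesh's indices zero-based and pass the vertex offset at draw time instead.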
I'm having some difficulty mixing instanced data with non-instanced data.
What I have is an array of GLfloats and an array of GLuints.
The GLuints are my index data for rendering elements. The GLfloats are the vertex, texel and depth information.
What I'm doing is collecting all the data required for rendering a series of quads and then submitting them all at once:
For each Quad there are:
Four vertices:
2 floats for the vertex position
2 floats for the texel position
One depth float
6 indices submitted to the index buffer
They are in that order. So after one blit of a texture that I want stretched to fill the whole screen, I would expect to see the following, with the vertices ordered top-right, bottom-right, bottom-left, top-left (the texture coords may seem flipped but they're correct):
Float Buffer
[ 1 | -1 | 1 | 0 ] [ 1 | 1 | 1 | 1 ] [ -1 | 1 | 0 | 1 ] [ -1 | -1 | 0 | 0 ] [ 0.9 ]
[ pos | tex ] [ pos | tex ] [ pos | tex ] [ pos | tex ] [depth]
Uint Buffer
[ 0 | 1 | 3 | 3 | 1 | 2 ]
In case it's not clear, all four vertices of a quad are meant to use the same depth value in their fragments.
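To make the packing concrete, here is a small sketch of how one quad's 17 floats would be appended to the CPU-side float buffer in this layout (the helper and its parameter names are illustrative, not my actual code):
#include <vector>
// Append one quad: four vertices of pos.xy + tex.uv, then one shared depth value.
void appendQuad(std::vector<GLfloat>& data,
                const GLfloat pos[4][2], const GLfloat tex[4][2], GLfloat depth)
{
    for (int v = 0; v < 4; ++v)
    {
        data.push_back(pos[v][0]);
        data.push_back(pos[v][1]);
        data.push_back(tex[v][0]);
        data.push_back(tex[v][1]);
    }
    data.push_back(depth);   // the 17th float of the quad
}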
And so, I set up the buffers like so:
uint32_t maxNum = 32; // this is the max amount I can submit
glGenBuffers(1, &m_vbo); // to store my vertex positions, texels and depth
glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
glBufferData(GL_ARRAY_BUFFER, 17 * maxNum, nullptr, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glGenBuffers(1, &m_indicesBuffer); // to store the indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6 * maxNum, nullptr, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glGenVertexArrays(1, &m_vertexArrayObject);
glBindVertexArray(m_vertexArrayObject);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glEnableVertexAttribArray(2);
glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 17, 0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 17, (void*)(sizeof(GLfloat)*2));
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, 17, (void*)(sizeof(GLfloat)*16));
glVertexAttribDivisor(0,0);
glVertexAttribDivisor(1,0);
glVertexAttribDivisor(2,4);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_indicesBuffer);
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
Populating the buffers and drawing looks like this. vertexAndTexelData and indicesData are the GLfloat and GLuint buffers discussed above, and numSubmitted is how many quads I'm actually drawing.
glBindTexture(GL_TEXTURE_2D, texture);
glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(GLfloat) * vertexAndTexelData.size(), vertexAndTexelData.data());
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_indicesBuffer);
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, sizeof(GLuint) * indicesData.size(), indicesData.data());
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindVertexArray(m_vertexArrayObject);
glDrawElementsInstanced(GL_TRIANGLES,indicesData.size(),GL_UNSIGNED_INT,(const void*)0,numSubmitted);
glBindVertexArray(0);
When I draw all this, I'm getting a black screen. If I take out the depth data and the instanced drawing stuff, everything works perfectly fine. So I'm guessing it's to do with the instanced drawing.
If I put this through RenderDoc I see the following:
Buffer Bindings
So I can see my three vertex attributes appear to be set correctly.
Vertex Attribute Formats
So these appear to be in the right layout and the correct data types.
Mesh Output
However, if I look at the Mesh data being submitted...
Something is clearly wrong. The Depth values all appear to be correct, so it appears the instanced drawing is working there.
But the positions and texels are obviously busted. Interestingly, wherever there isn't a garbage value, i.e. where a -1, 0 or 1 does appear, it is correct for that position.
I suspect it's the stride or offset or something... but I can't see the issue.
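For reference, here is a sketch of the same two allocations with the sizes expressed in bytes; glBufferData sizes (and glVertexAttribPointer strides) are measured in bytes, so 17 floats per quad means 17 * sizeof(GLfloat) bytes. Whether this alone explains the garbled output is an assumption on my part:
// Sketch: buffer sizes are byte counts, not element counts.
glBindBuffer(GL_ARRAY_BUFFER, m_vbo);
glBufferData(GL_ARRAY_BUFFER, 17 * sizeof(GLfloat) * maxNum, nullptr, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_indicesBuffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6 * sizeof(GLuint) * maxNum, nullptr, GL_DYNAMIC_DRAW);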
I followed a tutorial to build a .obj model with OpenGL.
I have only one problem: at the end, we have a std::vector<glm::vec3> to draw.
In the tutorial they said to use glBufferData().
So I wrote this:
float* _vertices = new float[vertices.size() * 3];
for (int i = 0; i < vertices.size(); ++i)
{
float* _t = glm::value_ptr(vertices[i]);
for (int j = 0; j < 3; ++j)
_vertices[i + j*(vertices.size() - 1)] = _t[j];
}
(I converted my vector into a float*.)
Then I load it:
mat4 projection;
mat4 modelview;
projection = perspective(70.0, (double)800 / 600, 1.0, 100.0);
modelview = mat4(1.0);
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(_vertices), _vertices, GL_DYNAMIC_DRAW);
And finally I draw it in my main loop:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
modelview = lookAt(vec3(3, 1, 3), vec3(0, 0, 0), vec3(0, 1, 0));
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glDrawArrays(GL_TRIANGLES, 0, vertices.size());
glDisableVertexAttribArray(0);
But it does not work... (I have a black screen)
sizeof(_vertices) does not give you what you expect. It returns the size of float*, which is a pointer, and not the number of bytes of the data behind the pointer.
Use vertices.data() as the pointer to the first element of the std::vector, and 3 * vertices.size() * sizeof(float) as the number of bytes (each glm::vec3 contains 3 floats).
Together, that looks like:
glBufferData(GL_ARRAY_BUFFER, 3 * vertices.size() * sizeof(float), vertices.data(), GL_DYNAMIC_DRAW);
You can also substitute sizeof(glm::vec3) for 3 * sizeof(float).
Also check whether your glm::perspective function expects the field of view in degrees or radians; you currently pass 70.0 degrees.
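Putting both points together, a minimal sketch (assuming a recent GLM build, which expects the angle in radians):
// Upload the vector directly; the byte size comes from the element type.
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), vertices.data(), GL_DYNAMIC_DRAW);

// Convert the field of view explicitly if your GLM version works in radians.
projection = glm::perspective(glm::radians(70.0f), 800.0f / 600.0f, 1.0f, 100.0f);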
I have an array of 131072 values to draw in OpenGL with shaders. The coordinate of each point is calculated from the index of the value, but I can't draw it. Right now I get an error in glDrawArrays.
This is the part of my code that sets up the VAO and VBO; imagen is a pointer to the float data:
int pixels = 131072;
// Create vertex array objects
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
// Create vertex buffers
glGenBuffers(1, &vbo);
// VBO for coordinates of first square
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER,
pixels * sizeof(GLfloat),
imagen,
GL_STATIC_DRAW);
glVertexAttribPointer(0, pixels, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
and this is my display function:
void display(void) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(vao);
glDrawArrays(GL_POINTS, 0, 1);
glBindVertexArray(0);
glutSwapBuffers();
glutPostRedisplay();
}
If I pass an array to the shader, how can I handle the array to calculate the coordinates from the index of each value?
Edit
This is how I calculate the coordinates of each point from the index of the array; if I have a cube of 64x64x32 pixels I do this:
XX = 64;
YY = 64;
ZZ = 32;
x = index % XX;
y = (index / XX) % YY;
z = (int) floor((double) index / (XX * YY));
And with the value of each element of the array I calculate the color of that point.
Edit 2
This is the image that I get when I draw all the points; I need to fill this object to get a volume.
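As an aside on the glDrawArrays error: glVertexAttribPointer only accepts a size of 1, 2, 3 or 4 components per vertex, and glDrawArrays takes the number of vertices to draw. Here is a sketch of the setup and draw with one float per point, assuming the positions are derived from gl_VertexID in the vertex shader:
// One float value per point; positions are computed in the shader from the vertex index.
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, pixels * sizeof(GLfloat), imagen, GL_STATIC_DRAW);
glVertexAttribPointer(0, 1, GL_FLOAT, GL_FALSE, 0, 0);   // size must be 1..4
glEnableVertexAttribArray(0);

// Draw one point per array element.
glDrawArrays(GL_POINTS, 0, pixels);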
I have a working Vertex-Buffer-Object but I need to add the normals.
The normals are stored in the same array as the vertex positions. They are interleaved:
Vx Vy Vz Nx Ny Nz
This is my code so far:
GLfloat values[NUM_POINTS*3 + NUM_POINTS*3];
void initScene() {
for(int i = 0; i < (NUM_POINTS) ; i = i+6){
values[i+0] = bunny[i];
values[i+1] = bunny[i+1];
values[i+2] = bunny[i+2];
values[i+3] = normals[i];
values[i+4] = normals[i+1];
values[i+5] = normals[i+2];
}
glGenVertexArrays(1,&bunnyVAO);
glBindVertexArray(bunnyVAO);
glGenBuffers(1, &bunnyVBO);
glBindBuffer(GL_ARRAY_BUFFER, bunnyVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(bunny), bunny, GL_STATIC_DRAW);
glVertexAttribPointer(0,3, GL_FLOAT, GL_FALSE, 0,0);
glEnableVertexAttribArray(0);
glGenBuffers(1, &bunnyIBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bunnyIBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(triangles), triangles, GL_STATIC_DRAW);
// unbind active buffers //
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
void renderScene() {
if (bunnyVBO != 0) {
// x: bind VAO //
glEnableClientState(GL_VERTEX_ARRAY);
glBindVertexArray(bunnyVAO);
glDrawElements(GL_TRIANGLES, NUM_TRIANGLES, GL_UNSIGNED_INT, NULL);
glDisableClientState(GL_VERTEX_ARRAY);
// unbind active buffers //
glBindVertexArray(0);
}
}
I can see something on the screen but it is not right as the normals are not used correctly...
How can I correctly use the values array together with my code so far?
You need to call glVertexAttribPointer two times, once for the vertices and once for the normals. This is how you tell OpenGL how your data is laid out inside your vertex buffer.
// Vertices consist of 3 floats, occurring every 24 bytes (6 floats),
// starting at byte 0.
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 24, 0);
// Normals consist of 3 floats, occurring every 24 bytes starting at byte 12.
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 24, (void*)12);
This is assuming that your normal attribute in your shader has an index of 1.
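Note that the glBufferData call in the question currently uploads bunny rather than the interleaved values array. Here is a sketch of how the setup might look with the interleaved data uploaded and both attributes declared (assuming the shader expects positions at location 0 and normals at location 1):
glBindBuffer(GL_ARRAY_BUFFER, bunnyVBO);
// Upload the interleaved array (Vx Vy Vz Nx Ny Nz per vertex), not the raw positions.
glBufferData(GL_ARRAY_BUFFER, sizeof(values), values, GL_STATIC_DRAW);

// Positions: 3 floats per vertex, 6-float (24-byte) stride, starting at byte 0.
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (void*)0);
glEnableVertexAttribArray(0);
// Normals: 3 floats per vertex, 6-float stride, starting 3 floats in.
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (void*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);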
I am trying to approximate a curved surface using quadrilateral patches. I did it with straightforward rendering using GL_QUADS, specifying the four vertices of each quad patch.
Now I am trying to gain some performance using vertex buffers and an interleaved array (verNor) of vertices and normals. The problem is that I get some random shapes, but not the correct shape I got previously.
Here I am putting my code:
GLenum err = glewInit();
if (GLEW_OK != err){
std::cout<<"Filed to Initialize GLEW :: "<<glewGetErrorString(err)<<std::endl;
}
verNor = new GLfloat [NA*NP*6]; // NA and NP are the number of points along, let's say, the x and y axes
indices = new GLuint [(NA)*(NP)*4]; // When the tube is cut and spread out.
// VBOs
glGenBuffers(1, &vbo_tube); // Ask the GPU driver for a buffer array. "vbo" now has the ID
glGenBuffers(1, &ibo_indices);
// For Vertices and Normals which are interleved
glBindBuffer(GL_ARRAY_BUFFER, vbo_tube);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 6*NA*NP, NULL, GL_STATIC_DRAW);
// Obtaining the pointer to the memory in graphics buffer
buffer_verNor = glMapBuffer(GL_ARRAY_BUFFER,GL_WRITE_ONLY);
// For Indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo_indices);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(int) * 4*(NA-1)*(NP-1), NULL, GL_STATIC_DRAW);
buffer_indices = glMapBuffer(GL_ELEMENT_ARRAY_BUFFER,GL_WRITE_ONLY);
// Calculate the vertices of the points around the tube. Correctness guaranteed because I can draw exactly what I wanted
// using normal straightforward GL_QUADS, that is, drawing quad by quad and without any VBOs.
// Calculated vertices are stored in vPoints.
for (int i=0; i<NP; i++) {
for (int j=0; j<NA; j++) {
// Calculate the normals of each and every point above and store them in v3
// Storing the vertices
verNor[6*( (i)*NA+(j) )+0] = (GLfloat)vPoints[i*NA+j].GetX();
verNor[6*( (i)*NA+(j) )+1] = (GLfloat)vPoints[i*NA+j].GetY();
verNor[6*( (i)*NA+(j) )+2] = (GLfloat)vPoints[i*NA+j].GetZ();
// Storing the Normals
verNor[6*((i-1)*NA+(j-1))+3] = (GLfloat)v3.GetX();
verNor[6*((i-1)*NA+(j-1))+4] = (GLfloat)v3.GetY();
verNor[6*((i-1)*NA+(j-1))+5] = (GLfloat)v3.GetZ();
// Calculating the indices which form the quad
indices[4*((i)*NA+(j))+0] = (GLuint) (i)*NA+j ;
indices[4*((i)*NA+(j))+1] = (GLuint) (i+1)*NA+j ;
indices[4*((i)*NA+(j))+2] = (GLuint) (i+1)*NA+j+1 ;
indices[4*((i)*NA+(j))+3] = (GLuint) (i)*NA+j+1 ;
}
}
memcpy(buffer_verNor, verNor, 6*(NA)*(NP));
glUnmapBuffer(GL_ARRAY_BUFFER); // Unmapping the buffer
memcpy(buffer_indices, indices, 4*(NA-1)*(NP-1));
glUnmapBuffer(GL_ELEMENT_ARRAY_BUFFER);
glEnable(GL_LIGHTING);
// Performing the Vertex Buffer Stuff
// For Vertices and Normals
glBindBuffer(GL_ARRAY_BUFFER, vbo_tube);
glVertexPointer( 3, GL_FLOAT, 6*sizeof(GLfloat), (GLvoid*)((char*)NULL + 0*sizeof(GLfloat)) );
glNormalPointer( GL_FLOAT, 6*sizeof(GLfloat), (GLvoid*)(((char*)NULL)+3*sizeof(GLfloat)) );
// For Indices
// Mapping the indices_vbo memory here
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo_indices);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLuint)*4*(NA-1)*(NP-1), indices, GL_STATIC_DRAW);
// Enabling all the buffers and drawing the quad patches
glBindBuffer(GL_ARRAY_BUFFER, vbo_tube);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo_indices);
// Enabling normals and vertices to draw
glEnableClientState (GL_NORMAL_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
// Drawing the patches
glDrawElements(GL_QUADS, (NA-1)*(NP-1), GL_UNSIGNED_INT,(GLvoid*)((char*)NULL));
// Disabling the buffer objects for safety
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glDeleteBuffers(1, &vbo_tube);
glDeleteBuffers(1, &ibo_indices);
The grid has NA by NP points, so I have to draw (NP-1)*(NA-1) quads.
Also, I can only get something (but not the correct shape) drawn when I give wrong offsets and strides in the glVertexPointer() and glNormalPointer() functions. The correct ones, I think, are:
vertexPointer :: Stride - 6*sizeof(GLfloat) , offset - 0(last argument)
normalPointer :: Stride - 6*sizeof(GLfloat) , offset - 3*sizeof(GLfloat)
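For reference, here is a sketch of the copies and the draw call with the sizes expressed in bytes and the index count covering every quad; memcpy takes a byte count and glDrawElements takes the number of indices (4 per quad with GL_QUADS), so treat the exact counts as assumptions about the intended grid:
// memcpy sizes are byte counts, matching the glBufferData allocations above.
memcpy(buffer_verNor, verNor, sizeof(GLfloat) * 6 * NA * NP);
glUnmapBuffer(GL_ARRAY_BUFFER);
memcpy(buffer_indices, indices, sizeof(GLuint) * 4 * (NA - 1) * (NP - 1));
glUnmapBuffer(GL_ELEMENT_ARRAY_BUFFER);

// glDrawElements takes the number of indices, i.e. 4 per quad.
glDrawElements(GL_QUADS, 4 * (NA - 1) * (NP - 1), GL_UNSIGNED_INT, (GLvoid*)0);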