I have changed three GLfloat arrays (GLfloat[]) into vectors (std::vector&lt;GLfloat&gt;) and my triangles stopped rendering! Here is my init function:
void initialize(GLuint &vao) {
    // Use a Vertex Array Object
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    Projection = perspective(45.0f, 16.0f / 9.0f, 0.1f, 100.0f);
    //ortho(-4.0f/3.0f, 4.0f/3.0f, -1.0f, 1.0f, -1.0f, 1.0f);
    Model = translate(Model, vec3(0.f, 0.f, 0.f));
    Model = rotate(Model, 45.0f, vec3(0.0f, 0.0f, 1.0f));

    //pos + textures array
    GLuint vboId;
    glGenBuffers(1, &vboId);
    glBindBuffer(GL_ARRAY_BUFFER, vboId);
    glBufferData(GL_ARRAY_BUFFER, (vertices_position.size() + texture_coord.size()) * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
    glBufferSubData(GL_ARRAY_BUFFER, 0, vertices_position.size() * sizeof(GLfloat), &vertices_position[0]);
    glBufferSubData(GL_ARRAY_BUFFER, vertices_position.size() * sizeof(GLfloat), texture_coord.size() * sizeof(GLfloat), &texture_coord[0]);

    //Indices
    GLuint eabId;
    glGenBuffers(1, &eabId);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eabId);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLfloat), &indices[0], GL_DYNAMIC_DRAW);

    //Textures
    GLuint textureId;
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    load_image("../squirrel.jpg");

    shaderProgram = create_program("../shaders/vert.shader", "../shaders/frag.shader");

    //Position attribute
    positionAttrId = glGetAttribLocation(shaderProgram, "position");
    glVertexAttribPointer(positionAttrId, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(positionAttrId);

    //Texture attribute
    textureCoordId = glGetAttribLocation(shaderProgram, "texture_coord");
    glVertexAttribPointer(textureCoordId, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid *)(vertices_position.size() * sizeof(GLfloat)));
    glEnableVertexAttribArray(textureCoordId);

    modelId = glGetUniformLocation(shaderProgram, "Model");
    cameraId = glGetUniformLocation(shaderProgram, "Camera");
}
And below is my render function:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram);
glUniformMatrix4fv(modelId, 1, GL_FALSE, glm::value_ptr(Model));
glUniformMatrix4fv(cameraId, 1, GL_FALSE, glm::value_ptr(cam.matrix()));
//VAO
glBindVertexArray(vao);
//glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glUseProgram(0);
You are using floating-point elements in your Index Buffer Object; that is not a valid data type.
OpenGL requires GLubyte (not hardware accelerated on most desktop GPUs), GLushort, or GLuint indices.
For best performance, use an array of GLushort and pass GL_UNSIGNED_SHORT as the element type in your call to glDrawElements (...).
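As a minimal sketch of what that looks like with your code (assuming the indices vector in initialize() becomes a std::vector&lt;GLushort&gt;; the index values below are only an illustration):
std::vector&lt;GLushort&gt; indices = { 0, 1, 2, 2, 3, 0 }; // example data: two triangles
...
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eabId);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLushort), indices.data(), GL_DYNAMIC_DRAW);
...
// the type argument must match the index data
glDrawElements(GL_TRIANGLES, (GLsizei)indices.size(), GL_UNSIGNED_SHORT, 0);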
If I read the screen with glReadPixels and then draw the same thing back with glDrawPixels, it works, but only if I draw with something other than glDrawArrays or glDrawElements (gluSphere/gluCylinder work just fine). I'm trying to draw an object to the screen and save its pixels in an array.
I tried reading from and writing to both the front and back buffers, and reading after swapping buffers, all to no avail.
Here is the code I use to draw the object (note that I do not use any kind of buffers outside of this scope):
void CCssample5View::DrawCylinder(Model obj)
{
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    Shader castingShader("casting.vs", "casting.fs");
    Shader lightShader("light.vs", "light.fs");

    GLuint MatrixID = glGetUniformLocation(castingShader.ID, "MVP");
    GLuint ViewMatrixID = glGetUniformLocation(castingShader.ID, "V");
    GLuint ModelMatrixID = glGetUniformLocation(castingShader.ID, "M");

    GLuint textur = loadBMP_custom("flower.bmp");
    GLuint TextureID = glGetUniformLocation(castingShader.ID, "myTextureSampler");

    indexVBO(obj.vertices, obj.uvs, obj.normals, obj.indices, obj.indexed_vertices, obj.indexed_uvs, obj.indexed_normals);

    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, obj.indexed_vertices.size() * sizeof(glm::vec3), &obj.indexed_vertices[0], GL_STATIC_DRAW);

    GLuint uvbuffer;
    glGenBuffers(1, &uvbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    glBufferData(GL_ARRAY_BUFFER, obj.indexed_uvs.size() * sizeof(glm::vec2), &obj.indexed_uvs[0], GL_STATIC_DRAW);

    GLuint normalbuffer;
    glGenBuffers(1, &normalbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    glBufferData(GL_ARRAY_BUFFER, obj.indexed_normals.size() * sizeof(glm::vec3), &obj.indexed_normals[0], GL_STATIC_DRAW);

    GLuint elementbuffer;
    glGenBuffers(1, &elementbuffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, obj.indices.size() * sizeof(unsigned short), &obj.indices[0], GL_STATIC_DRAW);

    castingShader.use();
    GLuint LightID = glGetUniformLocation(castingShader.ID, "LightPosition_worldspace");
    computeMatricesFromInputs();

    GLfloat gProjectionMatrix[16];
    glGetFloatv(GL_PROJECTION_MATRIX, gProjectionMatrix);
    glm::mat4 ProjectionMatrix = glm::make_mat4(gProjectionMatrix);

    GLfloat gViewMatrix[16];
    glGetFloatv(GL_MODELVIEW_MATRIX, gViewMatrix);
    glm::mat4 ViewMatrix = glm::make_mat4(gViewMatrix);

    glm::vec3 lightPos = glm::vec3(4, 4, 4);
    glUniform3f(LightID, lightPos.x, lightPos.y, lightPos.z);
    glUniformMatrix4fv(ViewMatrixID, 1, GL_FALSE, &ViewMatrix[0][0]);

    glm::mat4 ModelMatrix1 = glm::mat4(1.0);
    glm::mat4 MVP1 = ProjectionMatrix * ViewMatrix * ModelMatrix1;

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textur);
    glUniform1i(TextureID, 0);

    // Send our transformation to the currently bound shader,
    // in the "MVP" uniform
    glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP1[0][0]);
    glUniformMatrix4fv(ModelMatrixID, 1, GL_FALSE, &ModelMatrix1[0][0]);

    // 1st attribute buffer : vertices
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glVertexAttribPointer(
        0,        // attribute
        3,        // size
        GL_FLOAT, // type
        GL_FALSE, // normalized?
        0,        // stride
        (void*)0  // array buffer offset
    );

    // 2nd attribute buffer : UVs
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    glVertexAttribPointer(
        1,        // attribute
        2,        // size
        GL_FLOAT, // type
        GL_FALSE, // normalized?
        0,        // stride
        (void*)0  // array buffer offset
    );

    // 3rd attribute buffer : normals
    glEnableVertexAttribArray(2);
    glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    glVertexAttribPointer(
        2,        // attribute
        3,        // size
        GL_FLOAT, // type
        GL_FALSE, // normalized?
        0,        // stride
        (void*)0  // array buffer offset
    );

    // Index buffer
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, elementbuffer);

    // Draw the triangles !
    glDrawElements(
        GL_TRIANGLES,       // mode
        obj.indices.size(), // count
        GL_UNSIGNED_SHORT,  // type
        (void*)0            // element array buffer offset
    );

    //glFlush(); glFinish(); readScreen(screen, GL_RGB, true);

    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(2);
    glDeleteBuffers(1, &vertexbuffer);
    glDeleteBuffers(1, &uvbuffer);
    glDeleteBuffers(1, &normalbuffer);
    glDeleteBuffers(1, &elementbuffer);
    glDeleteProgram(castingShader.ID);
    glDeleteTextures(1, &textur);
    glDeleteVertexArrays(1, &VertexArrayID);
}
These are my read and draw screen functions:
void CCssample5View::readScreen(GLubyte* screen, GLenum format, bool back)
{
    check();
    if (format == GL_RGB) {
        check();
        if (back) {
            glReadBuffer(GL_BACK);
            check();
        }
        else {
            glReadBuffer(GL_FRONT);
            check();
        }
    }
    //glRasterPos2i(00, 00);
    //glDisable(GL_DEPTH_TEST);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glReadPixels(0, 0, w, h, format, GL_UNSIGNED_BYTE, screen);
    glFlush();
    glFinish();
    bool found = false;
    for (size_t u = 0; u <= w * h * 4; u++) {
        if (screen[u] != 0) {
            found = true;
            break;
        }
    }
    assert(found);
    check();
}
void CCssample5View::drawScreen(GLubyte *screen, GLenum format, bool back)
{
    glClear(GL_COLOR_BUFFER_BIT);
    //glGetIntegerv(GL_CURRENT_RASTER_POSITION, rasterpos);
    if (back) {
        glDrawBuffer(GL_BACK);
    }
    else {
        glDrawBuffer(GL_FRONT);
    }
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glDrawPixels(w, h, format, GL_UNSIGNED_BYTE, screen);
    check();
    glFlush();
    glFinish();
}
I cannot figure out what is wrong, since the drawing works perfectly; it's only the screen reading that fails.
I figured it out: all I had to do was add glUseProgram(0) at the very end of the DrawCylinder function, after more than three weeks of looking into this.
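For reference, a sketch of how the tail of DrawCylinder looks with that change (only the last few lines are shown; the rest is unchanged):
    glDisableVertexAttribArray(0);
    glDisableVertexAttribArray(1);
    glDisableVertexAttribArray(2);
    glDeleteBuffers(1, &vertexbuffer);
    glDeleteBuffers(1, &uvbuffer);
    glDeleteBuffers(1, &normalbuffer);
    glDeleteBuffers(1, &elementbuffer);
    glDeleteProgram(castingShader.ID);
    glDeleteTextures(1, &textur);
    glDeleteVertexArrays(1, &VertexArrayID);
    glUseProgram(0); // unbind the program; presumably the later glDrawPixels call was
                     // otherwise still being processed by the bound shaders
}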
I am trying to use two groups of shaders. ImageShader draws a bigger square, GridShader draws a smaller square. Inside the init function I create the shader programs (via new OpenGL::OpenGLShader), and after that I upload a buffer with the positions for each square.
I get the following result (the bright square is the ImageShader, which is declared second):
[render result screenshot]
Here is the code for the init() function:
gridShader = new OpenGL::OpenGLShader(Common::GetShaderResource(IDR_SHADERS_GRID_SQUARE_VERTEX), Common::GetShaderResource(IDR_SHADERS_GRID_SQUARE_FRAGMENT));
gridShader->bind();
//3x positions
float verticesSquare[6][3] = {
    -0.5f,  0.5f, 0.0f,
     0.5f,  0.5f, 0.0f,
     0.5f, -0.5f, 0.0f,
    ...
};
...
imageShader->unbind();
And here are the render functions:
void OpenglRenderer::RenderScene() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    visualize_image();
    visualize_grid();
    renderToImage();
    SwapBuffers(hdc);
}

void OpenglRenderer::visualize_image()
{
    imageShader->bind();

    GLint position = glGetAttribLocation(imageShader->shader_id, "position");
    GLint uvPos = glGetAttribLocation(imageShader->shader_id, "uvPos");

    glEnableVertexAttribArray(position);
    glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), 0);
    glEnableVertexAttribArray(uvPos);
    glVertexAttribPointer(uvPos, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(2 * sizeof(float)));

    glDrawArrays(GL_TRIANGLES, 0, 6);

    glDisableVertexAttribArray(position);
    glDisableVertexAttribArray(uvPos);
    imageShader->unbind();
}

void OpenglRenderer::visualize_grid()
{
    gridShader->bind();

    GLint position = glGetAttribLocation(gridShader->shader_id, "position");
    glEnableVertexAttribArray(position);
    glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), 0);

    glDrawArrays(GL_TRIANGLES, 0, 6);

    glDisableVertexAttribArray(position);
    gridShader->unbind();
}
Since you want to draw from two different buffers, you have to make sure that the correct one is bound in the rendering method (or, more precisely, when setting up the vertex attribute pointer). At the moment, all data is taken from vboIndexImage because that is the buffer bound when you call glVertexAttribPointer. Judging from your setup code, you shouldn't even set up the vertex attribute pointers in the render methods; instead, just bind the correct VAO:
Setup:
glGenVertexArrays(1, &vaoIndexImage);
glGenBuffers(1, &vboIndexImage);
glBindVertexArray(vaoIndexImage);
glBindBuffer(GL_ARRAY_BUFFER, vboIndexImage);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticesImage), &verticesImage[0][0], GL_STATIC_DRAW);
GLint position = glGetAttribLocation(imageShader->shader_id, "position");
GLint uvPos = glGetAttribLocation(imageShader->shader_id, "uvPos");
glEnableVertexAttribArray(position);
glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), 0);
glEnableVertexAttribArray(uvPos);
glVertexAttribPointer(uvPos, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(2 * sizeof(float)));
Rendering:
glBindVertexArray(vaoIndexImage);
glDrawArrays(GL_TRIANGLES, 0, 6);
Similar code should be used for the grid.
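For illustration, the grid path could look like this, assuming a second VAO/VBO pair (the names vaoIndexGrid and vboIndexGrid are made up here) and the verticesSquare array from the question's init():
Setup:
glGenVertexArrays(1, &vaoIndexGrid);
glGenBuffers(1, &vboIndexGrid);
glBindVertexArray(vaoIndexGrid);
glBindBuffer(GL_ARRAY_BUFFER, vboIndexGrid);
glBufferData(GL_ARRAY_BUFFER, sizeof(verticesSquare), &verticesSquare[0][0], GL_STATIC_DRAW);
GLint position = glGetAttribLocation(gridShader->shader_id, "position");
glEnableVertexAttribArray(position);
glVertexAttribPointer(position, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), 0);
Rendering:
gridShader->bind();
glBindVertexArray(vaoIndexGrid);
glDrawArrays(GL_TRIANGLES, 0, 6);
gridShader->unbind();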
I'm trying to draw two triangles using separate VAOs and VBOs, but at runtime I see only one triangle being rendered. Below is my code snippet; I'm not sure where I'm messing up.
I'm using GLFW and GLEW.
.....
//initialization and creating shader program
.....
GLfloat vertices[] = {
    -0.9f,  -0.5f, 0.0f, // Left
    -0.0f,  -0.5f, 0.0f, // Right
    -0.45f,  0.5f, 0.0f, // Top
};
GLfloat vertices2[] = {
    0.0f,  -0.5f, 0.0f,  // Left
    0.9f,  -0.5f, 0.0f,  // Right
    0.45f,  0.5f, 0.0f   // Top
};
GLuint VBO1, VAO1, EBO;
glGenVertexArrays(1, &VAO1);
glGenBuffers(1, &VBO1);
glGenBuffers(1, &EBO);
GLuint VBO2, VAO2;
glGenBuffers(1, &VAO2);
glGenBuffers(1, &VBO2);
glBindVertexArray(VAO1);
glBindBuffer(GL_ARRAY_BUFFER, VBO1);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
//glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
//glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)nullptr);
glEnableVertexAttribArray(0);
//glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glBindVertexArray(VAO1);
glBindBuffer(GL_ARRAY_BUFFER, VBO2);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices2), vertices2, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)nullptr);
glEnableVertexAttribArray(0);
//glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
while (!glfwWindowShouldClose(window))
{
    glfwPollEvents();

    glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(ShaderProgramID);

    glBindVertexArray(VAO1);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    //glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
    //glBindVertexArray(0);

    glBindVertexArray(VAO2);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    //glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
    //glBindVertexArray(0);

    glfwSwapBuffers(window);
}
glDeleteVertexArrays(1, &VAO1);
glDeleteVertexArrays(1, &VAO2);
glDeleteBuffers(1, &VBO1);
glDeleteBuffers(1, &VBO2);
glfwTerminate();
...
However, if I create the VAOs and VBOs as arrays like below and change the above code accordingly, I see both triangles. I'm unable to understand why that is.
GLuint VAO[2], VBO[2];
GLuint VBO2, VAO2;
glGenBuffers(1, &VAO2);
...
glBindVertexArray(VAO2);
You've initialized VAO2 as a buffer, not a VAO.
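A minimal corrected setup for the second triangle could look like this (note that the second setup block in the question also binds VAO1 instead of VAO2 before configuring VBO2, which needs to change as well):
GLuint VBO2, VAO2;
glGenVertexArrays(1, &VAO2); // was glGenBuffers(1, &VAO2)
glGenBuffers(1, &VBO2);
glBindVertexArray(VAO2);     // bind the second VAO, not VAO1
glBindBuffer(GL_ARRAY_BUFFER, VBO2);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices2), vertices2, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)nullptr);
glEnableVertexAttribArray(0);
glBindVertexArray(0);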
I'm a newbie to OpenGL and I'm trying to draw two triangles using two VAOs and only one VBO. Even after some research that gave me a better understanding of VAOs, VBOs, and how the relevant GLEW functions work, I have no clue why my program displays only one triangle instead of two. Can somebody help?
...
GLfloat points[] = {
     0.5f,  0.5f, 0.0f, // First Triangle
    -0.5f,  0.5f, 0.0f,
     0.5f, -0.5f, 0.0f,
     0.0f,  0.0f, 0.0f, // Second Triangle
    -1.0f,  0.0f, 0.0f,
     0.0f, -1.0f, 0.0f
};
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
GLuint vao1;
glGenVertexArrays(1, &vao1);
glBindVertexArray(vao1);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
GLuint vao2;
glGenVertexArrays(1, &vao2);
glBindVertexArray(vao2);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 9);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
...
while (!glfwWindowShouldClose(window))
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glUseProgram(shader_program);

    glBindVertexArray(vao1);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glBindVertexArray(0);

    glBindVertexArray(vao2);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glBindVertexArray(0);

    glUseProgram(0);
    glfwPollEvents();
    glfwSwapBuffers(window);
}
...
The last parameter for this function call is incorrect:
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 9);
You're telling it to offset by 9 bytes, but your vertex data consists of floats, so the second triangle actually starts 9 * sizeof(float) bytes into the buffer.
Try this:
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (char*)NULL + 9 * sizeof(float));
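An equivalent and arguably more common way to spell that offset is a plain integer-to-pointer cast; the 9 floats are 3 vertices times 3 components, i.e. the start of the second triangle:
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)(9 * sizeof(GLfloat)));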
I have fixed it; the working solution can be found at the bottom of the page.
I am trying to play with an interleaved VBO and a VAO. I am testing it on a simple triangle; this is what my index data and vertex data look like:
vertex_col data[] = {
    vertex_col(vec3( 0.5f,  0.5f,  0.5f), vec4(0.0f, 0.0f, 1.0f, 1.0f), vec3(0.0f, 0.0f, 0.0f)),
    vertex_col(vec3(-0.5f, -0.5f, -0.5f), vec4(0.0f, 1.0f, 0.0f, 1.0f), vec3(0.0f, 0.0f, 0.0f)),
    vertex_col(vec3(-0.5f,  0.5f,  0.5f), vec4(1.0f, 0.0f, 0.0f, 1.0f), vec3(0.0f, 0.0f, 0.0f)),
};
GLushort indices[] = {
    0, 1, 2
};
ModelLoader loader;
model = loader.loadToVAO(indices, 3, data, 3);
This is the vertex_col struct:
struct vertex_col {
public:
    vertex_col(const vec3& pos, const vec4& col, const vec3& normal) {
        this->pos = pos;
        this->colour = col;
        this->normal = normal;
    }

    vec3 pos;
    vec4 colour;
    vec3 normal;
};
This is how I load the data into the VAO and VBO:
RawModel ModelLoader::loadToVAO(const GLushort *indices, int m, const vertex_col *vertices, int n) {
    GLuint vaoID = createVAO();

    GLuint vboID;
    glGenBuffers(1, &vboID);
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glBufferData(GL_ARRAY_BUFFER, n * sizeof(vertex_col), vertices, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, pos));
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, colour));
    glEnableVertexAttribArray(2);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, normal));

    GLuint vboID_2;
    glGenBuffers(1, &vboID_2);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboID_2);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, m * sizeof(GLushort), indices, GL_STATIC_DRAW);

    unbindVAO();
    return RawModel(vaoID, m);
}

GLuint ModelLoader::createVAO() {
    GLuint vaoID;
    glGenVertexArrays(1, &vaoID);
    glBindVertexArray(vaoID);
    return vaoID;
}
And this is how I draw the VAO:
void Renderer::render(const RawModel &model) {
    shaders["basic"].use();
    glBindVertexArray(model.getVaoID());
    glDrawElements(GL_TRIANGLES, model.getVertexCount(), GL_UNSIGNED_SHORT, nullptr);
    glBindVertexArray(0);
    shaders["basic"].release();
}
The last call causes the error 'Access violation reading location 0x00000000' and I have no idea why. Also, in case it is relevant, I am using the GLEW and GLFW libraries.
Fixed version (the loader now returns a heap-allocated RawModel* instead of a RawModel by value):
RawModel *ModelLoader::loadToVAO(const GLushort *indices, int m, const vertex_col *vertices, int n) {
    GLuint vaoID = createVAO();

    GLuint vboID = 0;
    glGenBuffers(1, &vboID);
    glBindBuffer(GL_ARRAY_BUFFER, vboID);
    glBufferData(GL_ARRAY_BUFFER, n * sizeof(vertex_col), vertices, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, pos));
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, colour));
    glEnableVertexAttribArray(2);
    glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(vertex_col), (void*) offsetof(vertex_col, normal));

    vboID = 0;
    glGenBuffers(1, &vboID);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vboID);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, m * sizeof(GLushort), indices, GL_STATIC_DRAW);

    unbindVAO();
    return new RawModel(vaoID, m);
}
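Since loadToVAO now returns a pointer, the call sites change accordingly; a minimal sketch, assuming the same RawModel interface used in the render function above:
ModelLoader loader;
RawModel *model = loader.loadToVAO(indices, 3, data, 3); // heap-allocated now
...
glBindVertexArray(model->getVaoID());
glDrawElements(GL_TRIANGLES, model->getVertexCount(), GL_UNSIGNED_SHORT, nullptr);
glBindVertexArray(0);
...
delete model; // the caller owns the RawModel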