OpenGL: mixing the fixed-function pipeline and a shader program (Qt) - C++

I'm working on old code that uses the fixed-function pipeline. The scene is a bit complex but works fine; for the sake of simplicity, I replaced it with a single blue triangle:
void RenduOpenGL::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glViewport(0, 0, this->width(), this->height());

    glBegin(GL_TRIANGLES);
    glColor3d(0, 0, 1);
    glVertex3d(0.7, 0.7, 0.0);
    glVertex3d(-0.5, 0.7, 0.0);
    glVertex3d(0.1, -0.7, 0.0);
    glEnd();
}
Now I want to add shaders for new elements in the scene, while keeping the old elements such as this blue triangle.
I've read here that I can mix the two approaches to produce a scene that draws the fixed-function elements first and the shader-based ones second.
Therefore I want to add this code after the blue triangle:
float vertices[] = {
0.6, 0.6, 0.0,
-0.6, 0.6, 0.0,
0.0, -0.6, 0.0,
};
vbo.create(); // glGenBuffers(...);
vbo.bind(); // glBindBuffer(GL_ARRAY_BUFFER, vbo);
vbo.allocate(vertices, sizeof(vertices)); // glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertices, GL_STATIC_DRAW);
vbo.release(); // glBindBuffer(GL_ARRAY_BUFFER, 0);
prog.addShaderFromSourceFile(QOpenGLShader::Vertex, "shaders/base.vert");
prog.addShaderFromSourceFile(QOpenGLShader::Fragment, "shaders/base.frag");
vao.create(); // glGenVertexArrays(...)
vao.bind(); // glBindVertexArray(vao);
prog.enableAttributeArray("position"); // glEnableVertexAttribArray(VAO_position);
prog.setAttributeBuffer("position", GL_FLOAT, 0, 3); // (type, offset, tupleSize, stride = 0); // glVertexAttribPointer(VAO_position, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<const void *>(0));
vao.release(); // glBindVertexArray(0);
// draw the triangle
prog.bind(); // glUseProgram(shader_program);
vao.bind(); // glBindVertexArray(vertex_array_object);
glDrawArrays(GL_TRIANGLES, 0, 3);
vao.release(); // glBindVertexArray(0);
prog.release(); // glUseProgram(0);
I use Qt to call the OpenGL functions; the corresponding raw OpenGL calls are given in the comments.
My shaders are very basic:
// base.vert
#version 330
// vertex shader
in vec3 position;
void main() {
gl_Position = vec4(position.xyz, 1);
}
// base.frag
#version 330
// fragment shader
out vec4 pixel;
void main() {
pixel = vec4(1, 0.5, 0, 1);
}
That is supposed to draw an orange triangle, but when I put this code after the blue-triangle code, the orange triangle produced by the shaders does not appear.

Short (with code) answer:
The VBO and the prog.enableAttributeArray / prog.setAttributeBuffer calls should be made while the VAO is bound, so that the VAO records them.
Something along the lines:
float vertices[] = {
0.6, 0.6, 0.0,
-0.6, 0.6, 0.0,
0.0, -0.6, 0.0,
};
prog.addShaderFromSourceFile(QOpenGLShader::Vertex, "shaders/base.vert");
prog.addShaderFromSourceFile(QOpenGLShader::Fragment, "shaders/base.frag");
prog.bind(); // links the program if needed, then glUseProgram(shader_program);
vao.create(); // glGenVertexArrays(...)
vao.bind(); // glBindVertexArray(vao);
vbo.create(); // glGenBuffers(...);
vbo.bind(); // glBindBuffer(GL_ARRAY_BUFFER, vbo);
vbo.allocate(vertices, sizeof(vertices)); // glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertices, GL_STATIC_DRAW);
//vbo.release(); // keep the VBO bound: the attribute pointer below refers to it // glBindBuffer(GL_ARRAY_BUFFER, 0);
prog.enableAttributeArray("position"); // glEnableVertexAttribArray(VAO_position);
prog.setAttributeBuffer("position", GL_FLOAT, 0, 3); // (type, offset, tupleSize, stride = 0); // glVertexAttribPointer(VAO_position, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<const void *>(0));
vao.release(); // glBindVertexArray(0);
// draw the triangle
prog.bind(); // glUseProgram(shader_program);
vao.bind(); // glBindVertexArray(vertex_array_object);
glDrawArrays(GL_TRIANGLES, 0, 3);
vao.release(); // glBindVertexArray(0);
prog.release(); // glUseProgram(0);
Not-so-long but textual answer: OpenGL is a state machine. You need to tie together, inside the VAO, the VBO and the description of how to read its data. However, IMHO, the Qt developers chose this abstraction poorly: enableAttributeArray and setAttributeBuffer would be clearer as members of the VAO class rather than of the program class.
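For reference, here is a minimal sketch of the same idea written with raw OpenGL calls (assuming program is an already linked program object and vertices is the array above; the names are illustrative, not taken from the question): everything set up between glBindVertexArray(vao) and glBindVertexArray(0) is recorded in the VAO, including which GL_ARRAY_BUFFER the attribute pointer reads from.
GLuint vao = 0, vbo = 0;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);                                        // start recording vertex input state
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLint pos = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(pos);                                // recorded in the VAO
glVertexAttribPointer(pos, 3, GL_FLOAT, GL_FALSE, 0, nullptr); // the currently bound VBO is recorded too
glBindVertexArray(0);                                          // done recording
// each frame, after the fixed-function drawing:
glUseProgram(program);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glUseProgram(0);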

Related

How to use Vertex Array Object?

I am trying to simulate a cube bouncing between walls. For this purpose, I have constructed two vertex array objects, one representing the cube and the other representing the walls. First I created the walls. I draw the walls with a glDrawElements call, since I used an index (element) buffer to create them. I have also created a cube model, but I am not using an index buffer for it, so as far as I know I will draw it with a glDrawArrays call. My walls show up nicely, but I cannot figure out how to draw the cube (at this step I am not even trying to animate the cube, I just want to draw it). This is what my code looks like:
#include "Cube.h"
#include <cstring>
GLuint vao[2];
void init()
{
cube();
GLuint program = InitShader( "vshader.glsl", "fshader.glsl" );
glUseProgram( program );
GLuint vPosition = glGetAttribLocation(program, "vPosition");
glGenVertexArrays(2, vao);
glBindVertexArray(vao[0]);
GLuint cube_buffer;
glGenBuffers(1, &cube_buffer);
glBindBuffer(GL_ARRAY_BUFFER, cube_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(cubePoints), cubePoints, GL_STATIC_DRAW);
glEnableVertexAttribArray(vPosition);
glVertexAttribPointer(vPosition, 4, GL_FLOAT, GL_FALSE, 0, (void*)0);
const float mid_value = 0.0f;
const float far_value = 0.35f;
const float near_value = 1.0f;
const float positions[60] = {
/// positions /// colors
mid_value, -near_value, 1.0, 0.0, 0.0, /// 0
near_value, -near_value, 1.0, 0.0, 0.0, /// 1
near_value, mid_value, 0.0, 1.0, 0.0, /// 2
near_value, near_value, 0.0, 1.0, 0.0, /// 3
mid_value, near_value, 1.0, 1.0, 0.0, /// 4
-near_value, near_value, 1.0, 1.0, 0.0, /// 5
-near_value, mid_value, 0.0, 0.0, 1.0, /// 6
-near_value, -near_value, 0.0, 0.0, 1.0, /// 7
far_value, -far_value, 1.0, 0.0, 0.0, /// 8
far_value, far_value, 0.0, 1.0, 0.0, /// 9
-far_value, far_value, 1.0, 1.0, 0.0, /// 10
-far_value, -far_value, 0.0, 0.0, 1.0 /// 11
};
unsigned int indices[36] = {
7,11,0,
0,8,1,
1,8,2,
2,9,3,
3,9,4,
4,10,5,
5,10,6,
6,11,7,
11,0,8,
8,2,9,
9,4,10,
10,6,11,
};
glBindVertexArray(vao[1]);
GLuint room_buffer;
glGenBuffers(1, &room_buffer);
glBindBuffer(GL_ARRAY_BUFFER, room_buffer);
glBufferData(GL_ARRAY_BUFFER, 60 * sizeof(float), positions, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)0); /// positions
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 5, (void*)(2* sizeof(float))); /// colors
GLuint index_buffer;
glGenBuffers(1, &index_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 36 * sizeof(unsigned int), indices, GL_STATIC_DRAW);
glClearColor(1.0, 1.0, 1.0, 1.0);
}
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_INT, nullptr);
// glDrawArrays(...); --> how to use this, or should I really use this here?
glutSwapBuffers();
}
void idle() {
glutPostRedisplay();
}
void reshape(int width, int height) {
glViewport(0, 0, width, height);
}
int main( int argc, char **argv )
{
glutInit( &argc, argv );
glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE);
glutInitWindowSize( 600, 600 );
glutCreateWindow( "Bouncing Cube" );
glewExperimental = GL_TRUE;
glewInit();
init();
glutDisplayFunc(display);
glutIdleFunc(idle);
glutReshapeFunc(reshape);
glutMainLoop();
}
In this code, cubePoints is filled by the cube() function in another translation unit. I am not exactly sure how to draw the cube on the screen, or how to use glDrawArrays here. Should I bind or unbind anything to draw it? I feel like I am not using vertex array objects properly. Below is what my walls look like:
I simply want the cube to appear on the far side, where the white rectangle is located. Is it possible to use glDrawArrays together with glDrawElements?
You must bind the Vertex Array Object before the "draw" call. The drawing instruction uses the data stored in the currently bound Vertex Array Object:
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(vao[1]); // walls: this VAO holds the wall positions and the index buffer
glDrawElements(GL_TRIANGLES, 36, GL_UNSIGNED_INT, nullptr);
glBindVertexArray(vao[0]); // cube: no index buffer, so it is drawn with glDrawArrays
glDrawArrays(...); // GL_TRIANGLES, starting at vertex 0, with the number of vertices in cubePoints
glutSwapBuffers();
}

Using and binding multiple vbos in OpenGL

I am trying to experiment and learn how to use multiple VBOs to draw very different objects in OpenGL. The first VBO belongs to a text renderer; for now, the shader to be used with the main VBO is just a basic vertex shader:
attribute vec3 vPosition;
void main()
{
gl_Position = vec4(vPosition, 1.0);
}
Test Fragment Shader:
void main()
{
gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0);
}
The code renders the text fine, but as soon as I call UseProgram and bind the VAO, I get a black screen.
Here's my initialization of said VAO:
//random polygon
points[0]= vec3(-0.5, 0.5, 0.3);
points[1]= vec3(-0.5, -0.5, 0.2);
points[2]= vec3(0.5, -0.5, 0.3);
points[3]= vec3(0.5, 0.5, 0.4);
//Returns a uInt name for a shader program
shader = InitShader("vshader.glsl", "fshader.glsl");
glGenVertexArrays(1, &vao);
glGenBuffers(1, &buffer);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
GLuint loc = glGetAttribLocation(shader, "vPosition");
glEnableVertexAttribArray(loc);
glVertexAttribPointer(loc, 3, GL_FLOAT, GL_FALSE, 3*sizeof(float), BUFFER_OFFSET(0));
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f); // white background
Where vao and buffer are previously uninitialized GLuints, and after debugging they do not return -1.
And here is my code to draw
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBindVertexArray(vao);
glDrawArrays(GL_POLYGON, 0, 4);
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Thanks for all the help, everyone. I figured out that I needed to call glFlush() before binding my second VBO and VAO and drawing them.
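For what it's worth, a minimal sketch of the ordering described above, assuming the text renderer has its own program and VAO (textShader, textVao and textVertexCount are placeholder names, not identifiers from the post):
// 1) draw the text with its own program and VAO
glUseProgram(textShader);
glBindVertexArray(textVao);
glDrawArrays(GL_TRIANGLES, 0, textVertexCount);
glBindVertexArray(0);
glUseProgram(0);
glFlush(); // the call reported above as the fix
// 2) then draw the polygon with the second program and VAO
glUseProgram(shader);
glBindVertexArray(vao);
glDrawArrays(GL_POLYGON, 0, 4);
glBindVertexArray(0);
glUseProgram(0);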

How to draw multiple shapes

I'm trying to make a program with OpenGL that creates two rectangles, but my problem is that when I create my two model_views, the first one gets overwritten by the second one, so only one rectangle is displayed, and I'm not sure how to display both. I posted the entire code. How do I get both rectangles to render?
using namespace std;
#include "vgl.h"
#include "LoadShaders.h"
#include "glm\glm.hpp"
#include "glm\gtc\matrix_transform.hpp"
enum VAO_IDs { Triangles, NumVAOs };
enum Buffer_IDs { ArrayBuffer, NumBuffers };
enum Attrib_IDs { vPosition = 0 };
GLuint VAOs[NumVAOs];
GLuint Buffers[NumBuffers];
GLuint location;
const GLuint NumVertices = 4;
//---------------------------------------------------------------------
// Setting up our pipeline and preparing to draw
void init(void)
{
//Defining the name of our shader files
ShaderInfo shaders[] = {
{ GL_VERTEX_SHADER, "triangles.vert" },
{ GL_FRAGMENT_SHADER, "triangles.frag" },
{ GL_NONE, NULL }
};
//Loading and attaching shaders to our pipeline
GLuint program = LoadShaders(shaders);
glUseProgram(program); //My Pipeline is set up
// Coordinates of vertices (Square)
GLfloat vertices[NumVertices][2] = {
{ -0.3, -0.45 },
{ 0.3, -0.45 },
{ 0.3, 0.45 },
{ -0.3, 0.45 }
};
// Colors for vertices in {R, G, B} mode
GLfloat colorData[NumVertices][3] = {
{ 1,0,0 }, //Red
{ 0,1,0 }, //Green
{ 0,0,1 }, //Blue
{ 1,1,1 } //White
};
glGenBuffers(2, Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindAttribLocation(program, 0, "vPosition");
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(colorData), colorData, GL_STATIC_DRAW);
glBindAttribLocation(program, 1, "vertexColor");
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(1);
location = glGetUniformLocation(program, "model_matrix");
}
//---------------------------------------------------------------------
//This is done by using glutDisplayFunc function. Look at the main method
void drawScene(void)
{
//Clear the screen and preparing to draw
glClear(GL_COLOR_BUFFER_BIT);
/////////THIS IS WERE I CREATE RECTANGLES
//Sun
glm::mat4 model_view = glm::translate(glm::mat4(1.0), glm::vec3(0.0, 0.0, 0.0));
model_view = glm::scale(model_view, glm::vec3(0.5, 0.5, 0)); //shrink it
glUniformMatrix4fv(location, 1, GL_FALSE, &model_view[0][0]);
glm::mat4 model_view2 = glm::translate(glm::mat4(1.0), glm::vec3(0.5, 0.0, 0.0));
model_view2 = glm::scale(model_view2, glm::vec3(0.5, 0.5, 0)); //shrink it
glUniformMatrix4fv(location, 1, GL_FALSE, &model_view2[0][0]);
//The following function passes the generated rotation function into the vertex-shader
//Starting the pipeline
glDrawArrays(GL_QUADS, 0, NumVertices);
glDrawArrays(GL_QUADS, 0, NumVertices);
//Flush the image onto the window (screen)
glFlush();
}
//The registration happens in the main() function using the glutIdleFunc(runEveryFrame) function.
void runEveryFrame()
{
//Increasing our rotation angle
rotate_value += 0.001;
glutPostRedisplay();
}
int main(int argc, char** argv)
{
//Initializing to draw
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA);
glutInitWindowSize(512, 512);
glutCreateWindow("Hello World");
glewInit();
//init function is defined above
init();
//Registering the display function
glutDisplayFunc(drawScene);
//Registering the idle function
glutIdleFunc(runEveryFrame);
//glutMainLoop enters the event processing loop
glutMainLoop();
}
You call glDrawArrays twice with the same arguments, without changing any uniform between the two calls. My bet would be that the two shapes are drawn one over the other.
You have to call glUniform* before each glDrawArrays call if the uniform data (here, the model/view matrix) has changed:
void drawScene(void) {
glClear(GL_COLOR_BUFFER_BIT);
//first draw call with model/view transformation 1
glm::mat4 model_view = glm::translate(glm::mat4(1.0), glm::vec3(0.0, 0.0, 0.0));
model_view = glm::scale(model_view, glm::vec3(0.5, 0.5, 0)); //shrink it
glUniformMatrix4fv(location, 1, GL_FALSE, &model_view[0][0]);
glDrawArrays(GL_QUADS, 0, NumVertices);
//second draw call with model/view transformation 2
glm::mat4 model_view2 = glm::translate(glm::mat4(1.0), glm::vec3(0.5, 0.0, 0.0));
model_view2 = glm::scale(model_view2, glm::vec3(0.5, 0.5, 0));
glUniformMatrix4fv(location, 1, GL_FALSE, &model_view2[0][0]);
glDrawArrays(GL_QUADS, 0, NumVertices);
//Flush the image onto the window (screen)
glFlush();
}

Cannot Link GLSL Program

I can't get the following code to work. I want to render the triangle described in position[].
Sometimes the program gives me a shader compile error, sometimes a program linking error, and sometimes even both, without me changing the code in between.
Program:
Window window(TITLE, WIDTH, HEIGHT); // context and GLEW get initialized here
float position[] = {-0.5, -0.5, 0.0,
0.5, -0.5, 0.0,
0.0, 0.5, 0.0};
// Shader program gets init
sID = glCreateProgram();
vertexShaderID = glCreateShader(GL_Vertex_SHADER);
fragShaderID = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vertexShaderID, 1, vertexShaderCodeString); // The Code strings are valid, I printed them out
glShaderSource(fragmentShaderID, 1, fragShaderCodeString);
glCompileShader(vertexShaderID);
glCompileShader(fragShaderID);
shaderDidCompileCheck(vertexShaderID); // A function I wrote that checks for errors
shaderDidCompileCheck(fragShaderID);
glAttachShader(sID, vertexShaderID);
glAttachShader(sID, fragShaderID);
glLinkProgram(sID);
programDidLinkCheck(); // A function I wrote that checks for errors
glValidateProgram(sID);
glUseProgram(sID);
glBindAttribLocation(sID, 0, "position");
glUseProgram(0);
// Defining VBOs and VAOs
int bufferID;
glGenBuffers(1, &bufferID);
glBindBuffer(GL_ARRAY_BUFFER, bufferID);
glBufferData(GL_ARRAY_BUFFER, 3 * 3 * sizeof(float), mDataPtr, GL_STATIC_DRAW); // I have 3 vertices with 3 coordinates each
glBindBuffer(GL_ARRAY_BUFFER, 0);
int vaoID;
glGenVertexArrays(1, &vaoID);
glBindVertexArray(vaoID);
glBindBuffer(GL_ARRAY_BUFFER, bufferID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL); // I Want to load it to the index 0 of the VAO, the vertex size is 3, the data type is GL_FLOAT
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glBindVertexArray(vaoID);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
// Main loop
while(!window.close()) {
glClearColor(0.3, 0.8, 0.6, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(sID);
glBindVertexArray(vaoID);
glDrawArrays(GL_TRIANGLES, 0, 1);
glBindVertexArray(0);
glUseProgram(0);
glfwSwapBuffers(mWindow);
glfwPollEvents();
}
Vertex Shader:
#version 410
in vec3 position;
out vec3 color;
void main() {
gl_Position = vec4(position, 1.0);
color = vec3(position.x + 0.5, 0.5, position.y + 0.5);
}
Fragment Shader:
#version 410
in vec3 color;
out vec4 outputColor;
void main() {
outputColor = vec4(color, 1.0);
}
I think there are a few typos in your code. You write vertexShaderID = glCreateShader(GL_Vertex_SHADER), but the macro is all capital letters: GL_VERTEX_SHADER. Also try glShaderSource(vertexShaderID, 1, vertexShaderCodeString, NULL) instead of passing only 3 parameters.
How did you save the source code of your shaders? Make sure there is a \n after the version declaration; otherwise the following code ends up on the same line as the #version directive, which is bad.
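To illustrate both points, here is a minimal sketch of compiling a vertex shader from an in-memory string (a generic example, not the asker's actual loading code); note the four-argument glShaderSource call and the explicit \n after the #version line:
const char* vertexShaderCodeString =
    "#version 410\n"                       // the \n matters: without it the next token
    "in vec3 position;\n"                  // would end up on the #version line
    "void main() { gl_Position = vec4(position, 1.0); }\n";
GLuint vertexShaderID = glCreateShader(GL_VERTEX_SHADER);          // all-caps macro
glShaderSource(vertexShaderID, 1, &vertexShaderCodeString, NULL);  // count, array of strings, lengths (NULL = null-terminated)
glCompileShader(vertexShaderID);
GLint status = GL_FALSE;
glGetShaderiv(vertexShaderID, GL_COMPILE_STATUS, &status);         // check before attaching and linking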

OpenGL shows nothing

I have a problem while trying to render a triangle with OpenGL using LWJGL.
It does nothing: nothing is rendered, and no error is thrown.
glClear() is working (if I change the color, the color changes).
You can find a GLIntercept Log here:
GLIntercept Log
This is my OpenGL Initialization-Code:
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0f, 1.0f);
glViewport(0, 0, 800, 600); // my display size
After that I bind my shaders:
int vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, vertexShaderCode);
glCompileShader(vertexShader);
int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, fragmentShaderCode);
glCompileShader(fragmentShader);
int program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glBindAttribLocation(program, 0, "vert");
glLinkProgram(program);
glDetachShader(program, vertexShader);
glDetachShader(program, fragmentShader);
This are the shaders:
colored.vert
#version 150
uniform mat4 camera;
uniform mat4 model;
uniform vec4 color;
in vec3 vert;
out vec4 fragColor;
void main() {
gl_Position = camera * model * vec4(vert, 1);
fragColor = color;
}
colored.frag
#version 150
in vec4 fragColor;
out vec4 finalColor;
void main() {
finalColor = fragColor;
}
Then I create vbo, ibo and vao:
// VBO
FloatBuffer vboBuffer = BufferUtils.createFloatBuffer(9);
vboBuffer.put(
0, 1, 0,
1, 0, 0,
-1, 0, 0);
vboBuffer.flip();
vbo = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vboBuffer, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// IBO
ShortBuffer iboBuffer = BufferUtils.createShortBuffer(3);
iboBuffer.put(0, 1, 2);
iboBuffer.flip();
ibo = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, iboBuffer, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// VAO
vao = glGenVertexArrays();
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(glGetAttribLocation(program, "vert"));
glVertexAttribPointer(glGetAttribLocation(program, "vert"), 3, GL_FLOAT, false, 3 * Float.SIZE, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBindVertexArray(0);
I have changed this as advised by Andon M. Coleman to match the core profile; it does not resolve my issue:
// IBO
ShortBuffer iboBuffer = BufferUtils.createShortBuffer(3);
iboBuffer.put(0, 1, 2);
iboBuffer.flip();
// VBO
FloatBuffer vboBuffer = BufferUtils.createFloatBuffer(9);
vboBuffer.put(
0, 1, 0,
1, 0, 0,
-1, 0, 0);
vboBuffer.flip();
// VAO
vao = glGenVertexArrays();
glBindVertexArray(vao);
ibo = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, iboBuffer, GL_STATIC_DRAW);
vbo = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vboBuffer, GL_STATIC_DRAW);
glEnableVertexAttribArray(glGetAttribLocation(program, "vert"));
glVertexAttribPointer(glGetAttribLocation(program, "vert"), 3, GL_FLOAT, false, 3 * Float.SIZE, 0);
glBindVertexArray(0);
Before rendering:
glUseProgram(program);
glUniform4f(glGetUniformLocation(program, "color"), colorR, colorG, colorB, colorA);
glUseProgram(0);
Rendering:
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
FloatBuffer cameraBuffer = BufferUtils.createFloatBuffer(16);
// a simple orthographic camera at the position (0|0|1)
// left: -1; right: 1; bottom: -1; top: 1; zNear: -1; zFar: 1
cameraBuffer.put(
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, -1.0, 0.0,
0.0, 0.0, 1.0, 1.0);
cameraBuffer.flip();
glUniformMatrix4(glGetUniformLocation(program, "camera"), false, cameraBuffer);
FloatBuffer modelBuffer = BufferUtils.createFloatBuffer(16);
// no translation applied, so its an identity matrix
modelBuffer.put(
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0);
modelBuffer.flip();
glUniformMatrix4(glGetUniformLocation(program, "model"), false, modelBuffer);
glBindVertexArray(vao);
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);
glUseProgram(0);
// check for OpenGL errors
int error_code = glGetError();
if(error_code != GL_NO_ERROR)
System.err.print("OpenGL Error: " + gluErrorString(error_code));
Those are all the OpenGL commands I issue, in this exact order.
GLIntercept Log:
GLIntercept Log
Error is resolved.
The problem was that glVertexAttribPointer() expects the stride to be in bytes, while Float.SIZE in Java returns the size in bits.
Wrong:
glVertexAttribPointer(..., ..., ..., ..., 3 * Float.SIZE, ...);
Right:
glVertexAttribPointer(..., ..., ..., ..., 3 * Float.SIZE / 8, ...);
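Side note: on Java 8 and later, Float.BYTES gives the size in bytes directly, which avoids the division by 8.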