OpenGL shows nothing

I have a problem while trying to render a triangle with OpenGL using LWJGL.
Nothing is rendered: it neither draws anything nor throws any error.
glClear() is working (if I change the color, the color changes).
You can find a GLIntercept Log here:
GLIntercept Log
This is my OpenGL initialization code:
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glEnable(GL_DEPTH_TEST);
glDepthMask(true);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0f, 1.0f);
glViewport(0, 0, 800, 600); // my display size
After that I compile and link my shaders:
int vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, vertexShaderCode);
glCompileShader(vertexShader);
int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, fragmentShaderCode);
glCompileShader(fragmentShader);
int program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glBindAttribLocation(program, 0, "vert");
glLinkProgram(program);
glDetachShader(program, vertexShader);
glDetachShader(program, fragmentShader);
These are the shaders:
colored.vert
#version 150
uniform mat4 camera;
uniform mat4 model;
uniform vec4 color;
in vec3 vert;
out vec4 fragColor;
void main() {
gl_Position = camera * model * vec4(vert, 1);
fragColor = color;
}
colored.frag
#version 150
in vec4 fragColor;
out vec4 finalColor;
void main() {
finalColor = fragColor;
}
Then I create vbo, ibo and vao:
// VBO
FloatBuffer vboBuffer = BufferUtils.createFloatBuffer(9);
vboBuffer.put(new float[] {
     0, 1, 0,
     1, 0, 0,
    -1, 0, 0 });
vboBuffer.flip();
vbo = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vboBuffer, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// IBO
ShortBuffer iboBuffer = BufferUtils.createShortBuffer(3);
iboBuffer.put(new short[] { 0, 1, 2 });
iboBuffer.flip();
ibo = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, iboBuffer, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// VAO
vao = glGenVertexArrays();
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(glGetAttribLocation(program, "vert"));
glVertexAttribPointer(glGetAttribLocation(program, "vert"), 3, GL_FLOAT, false, 3 * Float.SIZE, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBindVertexArray(0);
I have changed that as advised by Andon M. Coleman to match the core profile, but it does not resolve my issue:
// IBO
ShortBuffer iboBuffer = BufferUtils.createShortBuffer(3);
iboBuffer.put(new short[] { 0, 1, 2 });
iboBuffer.flip();
// VBO
FloatBuffer vboBuffer = BufferUtils.createFloatBuffer(9);
vboBuffer.put(new float[] {
     0, 1, 0,
     1, 0, 0,
    -1, 0, 0 });
vboBuffer.flip();
// VAO
vao = glGenVertexArrays();
glBindVertexArray(vao);
ibo = glGenBuffers();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, iboBuffer, GL_STATIC_DRAW);
vbo = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vboBuffer, GL_STATIC_DRAW);
glEnableVertexAttribArray(glGetAttribLocation(program, "vert"));
glVertexAttribPointer(glGetAttribLocation(program, "vert"), 3, GL_FLOAT, false, 3 * Float.SIZE, 0);
glBindVertexArray(0);
Before rendering:
glUseProgram(program);
glUniform4f(glGetUniformLocation(program, "color"), colorR, colorG, colorB, colorA);
glUseProgram(0);
Rendering:
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
FloatBuffer cameraBuffer = BufferUtils.createFloatBuffer(16);
// a simple orthographic camera at the position (0|0|1)
// left: -1; right: 1; bottom: -1; top: 1; zNear: -1; zFar: 1
cameraBuffer.put(new float[] {
    1.0f, 0.0f,  0.0f, 0.0f,
    0.0f, 1.0f,  0.0f, 0.0f,
    0.0f, 0.0f, -1.0f, 0.0f,
    0.0f, 0.0f,  1.0f, 1.0f });
cameraBuffer.flip();
glUniformMatrix4(glGetUniformLocation(program, "camera"), false, cameraBuffer);
FloatBuffer modelBuffer = BufferUtils.createFloatBuffer(16);
// no translation applied, so it's an identity matrix
modelBuffer.put(new float[] {
    1.0f, 0.0f, 0.0f, 0.0f,
    0.0f, 1.0f, 0.0f, 0.0f,
    0.0f, 0.0f, 1.0f, 0.0f,
    0.0f, 0.0f, 0.0f, 1.0f });
modelBuffer.flip();
glUniformMatrix4(glGetUniformLocation(program, "model"), false, modelBuffer);
glBindVertexArray(vao);
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);
glUseProgram(0);
// check for OpenGL errors
int error_code = glGetError();
if(error_code != GL_NO_ERROR)
System.err.print("OpenGL Error: " + gluErrorString(error_code));
Those are all the OpenGL commands I call, in this order.
GLIntercept Log:
GLIntercept Log

The error is now resolved.
The problem was that glVertexAttribPointer() expects the stride in bytes, while Float.SIZE in Java returns the size in bits.
Wrong:
glVertexAttribPointer(..., ..., ..., ..., 3 * Float.SIZE, ...);
Right:
glVertexAttribPointer(..., ..., ..., ..., 3 * Float.SIZE / 8, ...);
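Since Java 8 you can also use Float.BYTES, which is already a byte count and avoids this class of mistake entirely. A minimal sketch of the corrected attribute setup, assuming the same program, vbo and "vert" attribute as above:
int vertLocation = glGetAttribLocation(program, "vert");
int stride = 3 * Float.BYTES; // 12 bytes per vertex; stride and offsets are always in bytes
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(vertLocation);
glVertexAttribPointer(vertLocation, 3, GL_FLOAT, false, stride, 0);
For a tightly packed buffer like this one, a stride of 0 also works, because OpenGL then derives the stride from the size and type arguments.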

Related

OpenGL: mixing fixed-function pipeline and shader program (Qt)

I'm working on old code that uses the fixed-function pipeline; the scene is a bit complex but works fine. For the sake of simplicity, I replaced it with one blue triangle:
void RenduOpenGL::paintGL()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
glViewport(0, 0, this->width(), this->height());
glBegin(GL_TRIANGLES);
glColor3d(0,0,1);
glVertex3d(0.7, 0.7, 0.0);
glVertex3d(-0.5, 0.7, 0.0);
glVertex3d(0.1, -0.7, 0.0);
glEnd();
}
Now I want to add shaders for new elements in the scene but keep the old elements of the scene like this blue triangle.
I've read here that I can mix the two to produce a scene containing the first then the second.
Therefore I want to add this code after the blue triangle:
float vertices[] = {
0.6, 0.6, 0.0,
-0.6, 0.6, 0.0,
0.0, -0.6, 0.0,
};
vbo.create(); // glGenBuffers(...);
vbo.bind(); // glBindBuffer(GL_ARRAY_BUFFER, vbo);
vbo.allocate(vertices, sizeof(vertices)); // glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertices, GL_STATIC_DRAW);
vbo.release(); // glBindBuffer(GL_ARRAY_BUFFER, 0);
prog.addShaderFromSourceFile(QOpenGLShader::Vertex, "shaders/base.vert");
prog.addShaderFromSourceFile(QOpenGLShader::Fragment, "shaders/base.frag");
vao.create(); // glGenVertexArrays(...)
vao.bind(); // glBindVertexArray(vao);
prog.enableAttributeArray("position"); // glEnableVertexAttribArray(VAO_position);
prog.setAttributeBuffer("position", GL_FLOAT, 0, 3); // (name, type, offset, tupleSize, stride = 0); roughly glVertexAttribPointer(VAO_position, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<const void*>(0));
vao.release(); // glBindVertexArray(0);
// draw the triangle
prog.bind(); // glUseProgram(shader_program);
vao.bind(); // glBindVertexArray(vertex_array_object);
glDrawArrays(GL_TRIANGLES, 0, 3);
vao.release(); // glBindVertexArray(0);
prog.release(); // glUseProgram(0);
I use Qt to call the OpenGL functions; the corresponding raw OpenGL calls are in the comments.
My shaders are very basic:
// base.vert
#version 330
// vertex shader
in vec3 position;
void main() {
gl_Position = vec4(position.xyz, 1);
}
// base.frag
#version 330
// fragment shader
out vec4 pixel;
void main() {
pixel = vec4(1, 0.5, 0, 1);
}
That is supposed to draw an orange triangle, but when I put the code after the blue triangle code, I don't see the orange triangle created from shaders.
Short answer (with code):
The VBO binding and the prog.enableAttributeArray / prog.setAttributeBuffer calls should be made while the VAO is bound.
Something along these lines:
float vertices[] = {
0.6, 0.6, 0.0,
-0.6, 0.6, 0.0,
0.0, -0.6, 0.0,
};
prog.bind(); // glUseProgram(shader_program);
vao.create(); // glGenVertexArrays(...)
vao.bind(); // glBindVertexArray(vao);
vbo.create(); // glGenBuffers(...);
vbo.bind(); // glBindBuffer(GL_ARRAY_BUFFER, vbo);
vbo.allocate(vertices, sizeof(vertices)); // glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), vertices, GL_STATIC_DRAW);
//vbo.release(); // glBindBuffer(GL_ARRAY_BUFFER, 0);
prog.addShaderFromSourceFile(QOpenGLShader::Vertex, "shaders/base.vert");
prog.addShaderFromSourceFile(QOpenGLShader::Fragment, "shaders/base.frag");
prog.enableAttributeArray("position"); // glEnableVertexAttribArray(VAO_position);
prog.setAttributeBuffer("position", GL_FLOAT, 0, 3); // (name, type, offset, tupleSize, stride = 0); roughly glVertexAttribPointer(VAO_position, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<const void*>(0));
vao.release(); // glBindVertexArray(0);
// draw the triangle
prog.bind(); // glUseProgram(shader_program);
vao.bind(); // glBindVertexArray(vertex_array_object);
glDrawArrays(GL_TRIANGLES, 0, 3);
vao.release(); // glBindVertexArray(0);
prog.release(); // glUseProgram(0);
Not so long, but textual, answer: OpenGL is a state machine; you need to link together, inside the VAO, the VBO and the description of how to read its data. However, IMHO, the Qt people have chosen their abstractions poorly: enableAttributeArray and setAttributeBuffer would be clearer as members of the VAO class instead of the prog class.
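For reference, the same ordering expressed as raw OpenGL calls, written here in Java/LWJGL style like the first question above (a minimal sketch; the FloatBuffer vertexBuffer, the linked program and its "position" attribute are assumed to exist):
vao = glGenVertexArrays();
glBindVertexArray(vao); // start recording vertex input state into the VAO
vbo = glGenBuffers();
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vertexBuffer, GL_STATIC_DRAW);
int position = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(position);
glVertexAttribPointer(position, 3, GL_FLOAT, false, 0, 0); // captures the currently bound GL_ARRAY_BUFFER
glBindVertexArray(0); // the VAO now remembers both the buffer and how to read it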

Using and binding multiple VBOs in OpenGL

I am trying to experiment and learn how to use multiple VBOs to draw very different objects in OpenGL. The first VBO is just a text renderer, and for now the shader to be used with the main VBO is just a basic vertex shader:
attribute vec3 vPosition;
void main()
{
gl_Position = vec4(vPosition, 1.0);
}
Test Fragment Shader:
void main()
{
gl_FragColor = vec4(1.0, 1.0, 0.0, 1.0);
}
The code renders the text fine, but as soon as I call glUseProgram and bind the VAO, I get a black screen.
Here's my initialization of said VAO:
//random polygon
points[0] = vec3(-0.5, 0.5, 0.3);
points[1] = vec3(-0.5, -0.5, 0.2);
points[2] = vec3(0.5, -0.5, 0.3);
points[3] = vec3(0.5, 0.5, 0.4);
//Returns a uInt name for a shader program
shader = InitShader("vshader.glsl", "fshader.glsl");
glGenVertexArrays(1, &vao);
glGenBuffers(1, &buffer);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
GLuint loc = glGetAttribLocation(shader, "vPosition");
glEnableVertexAttribArray(loc);
glVertexAttribPointer(loc, 3, GL_FLOAT, GL_FALSE, 3*sizeof(float), BUFFER_OFFSET(0));
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glClearColor(1.0f, 1.0f, 1.0f, 1.0f); // white background
Where vao and buffer are previously uninitialized GLuints, and after debugging they do not return -1.
And here is my code to draw:
glUseProgram(shader);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBindVertexArray(vao);
glDrawArrays(GL_POLYGON, 0, 4);
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
Thanks for all the help everyone, I figured out that I needed to call glFlush() before binding my second VBO and VAO and drawing them.

Cannot Link GLSL Program

I can't get the following code to work. I want to render the triangle I describe in position[].
The program sometimes gives me a shader compile error or a program linking error, and sometimes even both, without me changing the code in between.
Program:
Window window(TITLE, WIDTH, HEIGHT); // context and GLEW get initialized here
float position[] = {-0.5, -0.5, 0.0,
0.5, -0.5, 0.0,
0.0, 0.5, 0.0};
// Shader program gets init
sID = glCreateProgram();
vertexShaderID = glCreateShader(GL_Vertex_SHADER);
fragShaderID = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(vertexShaderID, 1, vertexShaderCodeString); // The Code strings are valid, I printed them out
glShaderSource(fragmentShaderID, 1, fragShaderCodeString);
glCompileShader(vertexShaderID);
glCompileShader(fragShaderID);
shaderDidCompileCheck(vertexShaderID); // A function I wrote that checks for errors
shaderDidCompileCheck(fragShaderID);
glAttachShader(sID, vertexShaderID);
glAttachShader(sID, fragShaderID);
glLinkProgram(sID);
programDidLinkCheck(); // A function I wrote that checks for errors
glValidateProgram(sID);
glUseProgram(sID);
glBindAttribLocation(sID, 0, "position");
glUseProgram(0);
// Defining VBOs and VAOs
int bufferID;
glGenBuffers(1, &bufferID);
glBindBuffer(GL_ARRAY_BUFFER, bufferID);
glBufferData(GL_ARRAY_BUFFER, 3 * 3 * sizeof(float), mDataPtr, GL_STATIC_DRAW); // I have 3 vertices with 3 coordinates each
glBindBuffer(GL_ARRAY_BUFFER, 0);
int vaoID;
glGenVertexArrays(1, &vaoID);
glBindVertexArray(vaoID);
glBindBuffer(GL_ARRAY_BUFFER, bufferID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL); // I Want to load it to the index 0 of the VAO, the vertex size is 3, the data type is GL_FLOAT
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
glBindVertexArray(vaoID);
glEnableVertexAttribArray(0);
glBindVertexArray(0);
// Main loop
while(!window.close()) {
glClearColor(0.3, 0.8, 0.6, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(sID);
glBindVertexArray(vaoID);
glDrawArrays(GL_TRIANGLES, 0, 1);
glBindVertexArray(0);
glUseProgram(0);
glfwSwapBuffers(mWindow);
glfwPollEvents();
}
Vertex Shader:
#version 410
in vec3 position;
out vec3 color;
void main() {
gl_Position = vec4(position, 1.0);
color = vec3(position.x + 0.5, 0.5, position.y + 0.5);
}
Fragment Shader:
#version 410
in vec3 color;
out vec4 outputColor;
void main() {
outputColor = vec4(color, 1.0);
}
I think there are a few typos in your code. You write vertexShaderID = glCreateShader(GL_Vertex_SHADER), but OpenGL macros are written in capital letters: GL_VERTEX_SHADER. Also try glShaderSource(vertexShaderID, 1, vertexShaderCodeString, NULL) instead of passing only 3 parameters.
How did you save the source code of your shaders? Make sure there is a \n after the #version declaration, otherwise the following code ends up on the same line as the directive, which is bad.
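To make the last point concrete, here is a minimal sketch of building a shader source with explicit newlines and checking the compile status, written in Java/LWJGL style like the first question above (the GLSL body is only illustrative):
String vertexShaderCode =
    "#version 410\n" +              // the \n matters: without it the next line is glued onto the #version directive
    "in vec3 position;\n" +
    "void main() {\n" +
    "    gl_Position = vec4(position, 1.0);\n" +
    "}\n";
int vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, vertexShaderCode);
glCompileShader(vertexShader);
if (glGetShaderi(vertexShader, GL_COMPILE_STATUS) == GL_FALSE)
    System.err.println(glGetShaderInfoLog(vertexShader, 1024));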

C++ openGL static window with VBOs and shaders

So I've been building a pretty simple piece of code that renders .OBJ files and lets the user look around, using VBOs and the simplest GLSL shaders imaginable, but when run, the window is just a static image (it looks like the object seen from 0,0,0). Without the shader program it still renders normally, just without color. Any idea what I could be doing wrong?
VBO & shader stuff:
//Create buffer for vertex data (x,y,z)
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(vector3f), &vertices[0], GL_STATIC_DRAW);
//Create buffer for color data (r,g,b)
glGenBuffers(1, &colorBuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glBufferData(GL_ARRAY_BUFFER, temp.size() * sizeof(vector3f), &temp[0], GL_STATIC_DRAW);
//Create and compile shaders
GLuint vertShader = 0, fragShader = 0, program = 0;
createShader("vertex.glsl", GL_VERTEX_SHADER, vertShader);
createShader("fragment.glsl", GL_FRAGMENT_SHADER, fragShader);
program = glCreateProgram();
glAttachShader(program, vertShader);
glAttachShader(program, fragShader);
glBindAttribLocation(program, vertexAttribIndex, "vertex_position");
glBindAttribLocation(program, colorAttribIndex, "vertex_colour");
glLinkProgram(program);
printShaderInfoLog(vertShader);
printProgramInfoLog(program);
//Set attribute pointers for GLSL
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(vertexAttribIndex, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(vertexAttribIndex);
glBindBuffer(GL_ARRAY_BUFFER, colorBuffer);
glVertexAttribPointer(colorAttribIndex, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(colorAttribIndex);
Render loop:
while (!glfwWindowShouldClose(window)) {
glClearColor(0.0F, 0.0F, 0.0F, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glUseProgram(program);
glViewport(0, 0, width, height);
gluPerspective(70.0, width / height, 1.0, 30.0);
glMatrixMode(GL_MODELVIEW);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glDrawArrays(GL_TRIANGLES, 0, vertBufferSize);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
glfwSwapBuffers(window);
glfwPollEvents();
}
Vertex shader:
#version 420
layout(location = 0) in vec3 vertex_position;
layout(location = 1) in vec3 vertex_colour;
out vec3 colour;
out vec4 gl_Position;
void main () {
colour = vertex_colour;
gl_Position = vec4 (vertex_position, 1.0);
}
Fragment shader:
#version 420
in vec3 colour;
out vec4 colorOut;
void main () {
colorOut = vec4(colour, 1.0);
}
Sorry if I've missed out anything obvious, I've been fighting this for hours.

OpenGL Program Does Not Render

I have a Mac running OS X Yosemite with GLFW 3 and OpenGL 4.1. I've got this program:
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtx/string_cast.hpp>
#include <stdio.h>
#include "myglutils.hpp"
int main() {
if (!setupGLFW()) return 1;
setupApple();
GLFWwindow* window = glfwCreateWindow(640, 480, "Perspective", 0, NULL);
if (!window) {
fprintf(stderr, "Failed to create window.\n");
glfwTerminate();
return 0;
}
glfwMakeContextCurrent(window);
if (!setupGLEW()) return 1;
glClearColor(0.2, 0.0, 0.8, 1.0);
const GLfloat vertices[] = {
-1.0, -1.0,
1.0, -1.0,
0.0, 1.0
};
const GLchar* vert_shader =
"#version 410 core\n"
"in vec2 pos;"
"uniform mat4 MVP;"
"void main() {"
" gl_Position = MVP * vec4(pos, 0.0, 1.0);"
"}";
const GLchar* frag_shader =
"#version 410 core\n"
"out vec4 color;"
"void main() {"
" color = vec4(1.0, 0.0, 0.0, 1.0);"
"}";
GLuint shader = getShader(vert_shader, frag_shader);
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(GLfloat), vertices, GL_STATIC_DRAW);
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
GLuint MVPID = glGetUniformLocation(shader, "MVP");
glm::mat4 perspective = glm::perspective(45.0, 4.0 / 3.0, 0.1, 100.0);
glm::mat4 view = glm::lookAt(glm::vec3(4.0, 3.0, 3.0), glm::vec3(0.0, 0.0, 0.0), glm::vec3(0.0, 1.0, 0.0));
glm::mat4 MVP = perspective * view;
while (!glfwWindowShouldClose(window) && glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS) {
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shader);
glUniformMatrix4fv(MVPID, 1, GL_FALSE, &MVP[0][0]);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 6);
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteVertexArrays(1, &vao);
glDeleteProgram(shader);
glfwTerminate();
return 0;
}
The functions setupGLFW(), setupApple(), and setupGLEW() aren't the problem. Neither is getShader(). I feel like my error is something silly like forgetting a simple OpenGL function call. What I'm trying to do is draw a triangle at a 3D angle.
Note: this program compiles perfectly, but only displays an empty blue screen.
There are a number of issues in this code. The main one is that you never enable the vertex attribute array. You will need to add a glEnableVertexAttribArray() call in the setup code:
glBindVertexArray(vao);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
Also, the arguments to glDrawArrays() are not correct:
glDrawArrays(GL_TRIANGLES, 0, 6);
The 3rd argument is the number of vertices. Since you only have 3 vertices, it should be:
glDrawArrays(GL_TRIANGLES, 0, 3);
Also, unless this is taken care of in code you did not post, you need to make sure that the attribute location you are using matches the program. You use 0 in the code, but you can't count on the location being zero without any further action. The easiest way to fix this is to use a location layout qualifier in the shader code:
"layout(location=0) in vec2 pos;"
This will set the location to 0, matching your code.
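Alternatively, an attribute location can be assigned from the application side with glBindAttribLocation() before the program is linked, as the first question above does for its "vert" attribute; the layout qualifier simply keeps that contract visible in the shader source itself.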