glViewport inside loop causes smaller viewport - C++

I have a simple OpenGL program running on macOS:
glUseProgram(program);
glViewport(0, 0, mWidth, mHeight);
glClearColor(0.25f, 0.25f, 0.25f, 1.0f);
// Rendering loop
while (glfwWindowShouldClose(mWindow) == false) {
    if (glfwGetKey(mWindow, GLFW_KEY_ESCAPE) == GLFW_PRESS) {
        glfwSetWindowShouldClose(mWindow, true);
    }
    // why can't I set the viewport here?
    glViewport(0, 0, mWidth, mHeight);
    // Background fill color
    glClear(GL_COLOR_BUFFER_BIT);
    glBindVertexArray(vao);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    // Flip buffers and draw
    glfwSwapBuffers(mWindow);
    glfwPollEvents();
}
If I comment out the glViewport call inside the loop, it works well and the triangle is rendered in the center of the window; but if I leave it in, the triangle shows up in the bottom-left corner of the window.
Below is my shader and vertex data code:
const int mWidth = 1280;
const int mHeight = 800;
auto mWindow = glfwCreateWindow(mWidth, mHeight, "OpenGL", nullptr, nullptr);
const GLchar* vertex_shader_src =
    "#version 330 core\n"
    "layout (location = 0) in vec3 aPos;\n"
    "void main()\n"
    "{\n"
    "    gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
    "}";
const GLchar* fragment_shader_src =
    "#version 330 core\n"
    "out vec4 FragColor;\n"
    "void main()\n"
    "{\n"
    "    FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
    "}";
GLfloat vertices[] = {
    -0.5f, -0.5f, 0.0f,
     0.5f, -0.5f, 0.0f,
     0.0f,  0.5f, 0.0f
};
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
Could somebody help me?

As @httpdigest commented, it is because macOS (on Retina displays) uses a framebuffer that is larger than the window size in screen coordinates; if I query the size with glfwGetFramebufferSize and use that as the viewport size, it works!
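For reference, a minimal sketch of that fix (reusing the mWindow handle from the code above; the framebuffer size can change at runtime, e.g. when the window moves between a Retina and a non-Retina display, so querying it each frame inside the loop is the safe variant):

int fbWidth = 0, fbHeight = 0;
glfwGetFramebufferSize(mWindow, &fbWidth, &fbHeight); // size in pixels, not in screen coordinates
glViewport(0, 0, fbWidth, fbHeight);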

Related

OpenGL: Program renders plain white square instead of a transformed square

I am trying to render a 2D square that has color and is transformed by 4x4 matrices. The output is supposed to look like this: [expected output image]. However, I am instead getting this: [actual output image, a plain white square].
This is frustrating because I had this problem in another project. My professor walked me through solving it, and all we did was play around with a few things here and there until it magically worked; no code was altered. Now I am experiencing the issue again and have no clue why it is happening. I typed the tutorial code correctly, character for character, and it is not rendered correctly. It did, however, render correctly on my professor's computer.
Here is the code:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <iostream>
#define WINDOW_TITLE "Modern OpenGL"
#ifndef GLSL
#define GLSL(Version, Source) "#version" #Version "\n" #Source
#endif
GLint shaderProgram, windowWidth = 800, windowHeight = 600;
GLuint VBO, VAO, EBO, texture;
void uResizeWindow(int, int);
void uRenderGraphics();
void uCreateShader();
void uCreateBuffers();
//Vertex shader source code
const GLchar* vertexShaderSource = GLSL(330,
    layout(location = 0) in vec3 position;
    layout(location = 1) in vec3 color;
    out vec3 mobileColor;
    uniform mat4 model;
    uniform mat4 view;
    uniform mat4 projection;
    void main() {
        gl_Position = projection * view * model * vec4(position, 1.0f); //transforms vertices to clip coordinates
        mobileColor = color; //references incoming color data
    });
//Fragment shader source code
const GLchar* fragmentShaderSource = GLSL(330,
    in vec3 mobileColor;
    out vec4 gpuColor;
    void main() {
        gpuColor = vec4(mobileColor, 1.0f);
    });
int main(int argc, char** argv) {
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
    glutInitWindowSize(windowWidth, windowHeight);
    glutCreateWindow(WINDOW_TITLE);
    glutReshapeFunc(uResizeWindow);
    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK) {
        std::cout << "Failed to initialize GLEW" << std::endl;
    }
    uCreateShader();
    uCreateBuffers();
    glUseProgram(shaderProgram);
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glutDisplayFunc(uRenderGraphics);
    glutMainLoop();
    //Destroys buffer objects once used
    glDeleteVertexArrays(1, &VAO);
    glDeleteBuffers(1, &VBO);
    glDeleteBuffers(1, &EBO);
    return 0;
}

void uResizeWindow(int w, int h) {
    windowWidth = w;
    windowHeight = h;
    glViewport(0, 0, windowWidth, windowHeight);
}

void uRenderGraphics() {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glBindVertexArray(VAO);
    glm::mat4 model(1.0f);
    model = glm::translate(model, glm::vec3(0.0f, 0.0f, 0.0f)); //place object at center of viewport
    model = glm::rotate(model, 15.0f, glm::vec3(1.0f, 0.0f, 0.0f)); //rotate object 15 degrees on x-axis
    model = glm::scale(model, glm::vec3(2.0f, 2.0f, 2.0f)); //increase object size by factor of 2
    //transforms the camera
    glm::mat4 view(1.0f);
    view = glm::translate(view, glm::vec3(0.0f, 0.0f, -3.0f)); //moves camera backwards -3 units in z
    //creates perspective projection
    glm::mat4 projection(1.0f);
    projection = glm::perspective(45.0f, (GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);
    //retrieves and passes transform matrices to shader program
    GLint modelLoc = glGetUniformLocation(shaderProgram, "model");
    GLint viewLoc = glGetUniformLocation(shaderProgram, "view");
    GLint projLoc = glGetUniformLocation(shaderProgram, "projection");
    //draws the triangles
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
    glBindVertexArray(0); //deactivate the vertex array object
    glutSwapBuffers();
}

void uCreateShader() {
    //vertex
    GLint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
    glCompileShader(vertexShader);
    //fragment
    GLint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
    glCompileShader(fragmentShader);
    shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glLinkProgram(shaderProgram);
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
}

void uCreateBuffers() {
    GLfloat vertices[] = {
         0.5f,  0.5f, 0.0f,  1.0f, 0.0f, 0.0f, //top right vertex 0
         0.5f, -0.5f, 0.0f,  0.0f, 1.0f, 0.0f, //bottom right vertex 1
        -0.5f, -0.5f, 0.0f,  0.0f, 0.0f, 1.0f, //bottom left vertex 2
        -0.5f,  0.5f, 0.0f,  1.0f, 0.0f, 1.0f  //top left vertex 3
    };
    GLuint indices[] = {
        0, 1, 3, //triangle 1
        1, 2, 3  //triangle 2
    };
    //gen buffer ids
    glGenVertexArrays(1, &VAO);
    glGenBuffers(1, &VBO);
    glGenBuffers(1, &EBO);
    glBindVertexArray(VAO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
    //set attrib pointer 0 to hold pos data
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
    glEnableVertexAttribArray(0);
    //set attrib pointer 1 to hold color data
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
    glEnableVertexAttribArray(1);
    glBindVertexArray(0); //deactivate VAO
}
The vertex shader and the fragment shader cannot be compiled, because a space is missing in the GLSL macro (after #version):

#define GLSL(Version, Source) "#version" #Version "\n" #Source

It has to be:

#define GLSL(Version, Source) "#version " #Version "\n" #Source
I recommend checking whether the shader compilation succeeded and whether the program object linked successfully.
Whether a shader compiled successfully can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS; whether a program linked successfully can be checked with glGetProgramiv and the parameter GL_LINK_STATUS.
See the answer to OpenGL ignores Quads and makes them Triangles.
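For example, a minimal status check after glCompileShader and glLinkProgram could look like this (a sketch reusing the variable names from uCreateShader above; it is not part of the original answer):

GLint status = GL_FALSE;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
if (status == GL_FALSE) {
    GLchar log[1024];
    glGetShaderInfoLog(vertexShader, sizeof(log), NULL, log); // human-readable compile errors
    std::cout << "vertex shader compile error: " << log << std::endl;
}
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &status);
if (status == GL_FALSE) {
    GLchar log[1024];
    glGetProgramInfoLog(shaderProgram, sizeof(log), NULL, log); // human-readable link errors
    std::cout << "program link error: " << log << std::endl;
}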
The unit of the angle argument of glm::perspective is radians:
projection = glm::perspective(45.0f, (GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);

It has to be:

projection = glm::perspective(glm::radians(45.0f),
    (GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);
You missed setting the matrix uniforms:
GLint modelLoc = glGetUniformLocation(shaderProgram, "model");
GLint viewLoc = glGetUniformLocation(shaderProgram, "view");
GLint projLoc = glGetUniformLocation(shaderProgram, "projection");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));
glUniformMatrix4fv(projLoc, 1, GL_FALSE, glm::value_ptr(projection));

Different fragment shader colors

I want to draw different figures with different colors using shaders.
I tried using uniform variables and putting the vertex attributes of the meshes into separate Vertex Buffer Objects, but when I run the program only the second color is used and the first figure isn't shown at all. I'm not sure if it's an issue with how I bind the shaders or with the way I use the uniform variables. Thanks in advance.
Here is the code:
#include <GL/glew.h>
#include <GL/glut.h>
#include <stdio.h>
const char* vertexShaderSource = "#version 330 core\n"
    "layout (location = 0) in vec3 aPos;\n"
    "void main()\n"
    "{\n"
    "   gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
    "}\0";
const char* fragmentShaderSource = "#version 330 core\n"
    "in vec4 color;\n"
    "out vec4 FragColor;\n"
    "uniform vec3 figColor;\n"
    "void main()\n"
    "{\n"
    "   FragColor = vec4(figColor, 0.0f);\n"
    "}\n\0";
// Set of vertices that draw the different figures:
float vertices1[] = {
    -0.8f, 0.6f, 0.0f,
    -0.8f, 0.4f, 0.0f,
    -0.83f, 0.44f, 0.0f,
    -0.87f, 0.51f, 0.0f,
    -0.9f, 0.57f, 0.0f,
    -0.93f, 0.63f, 0.0f,
    -0.95f, 0.69f, 0.0f,
    -0.97f, 0.75f, 0.0f,
    -0.98f, 0.8f, 0.0f,
    -0.91f, 0.8f, 0.0f,
    -0.85f, 0.79f, 0.0f,
    -0.8f, 0.77f, 0.0f,
};
float vertices2[] = {
    -0.8f, -0.15f, 0.0f,
    -1.0f, 0.0f, 0.0f,
    -0.95f, 0.0f, 0.0f,
    -0.86f, -0.01f, 0.0f,
    -0.81f, -0.02f, 0.0f,
    -0.75f, -0.03f, 0.0f,
};
GLuint vertexShader;
GLuint fragmentShader;
GLuint shaderProgram;
void display(void) {
    // Background color.
    glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(shaderProgram);
    GLint uniform = glGetUniformLocation(shaderProgram, "figColor");
    glUniform3f(uniform, 1.0f, 0.0f, 0.0f);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
    glUniform3f(uniform, 0.0f, 0.749f, 1.0f);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 6);
    glUseProgram(0);
    glFlush();
}
// Main.
int main(int argc, char** argv) {
    glutInit(&argc, argv);
    // Color mode.
    glutInitDisplayMode(GLUT_RGBA);
    // Window size.
    glutInitWindowSize(500, 500);
    // Title.
    glutCreateWindow("Tarea 3: Figura con curvas");
    GLenum err = glewInit();
    if (err != GLEW_OK) {
        printf("glewInit failed: %s", glewGetErrorString(err));
        exit(1);
    }
    // Compile vertex shader.
    vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
    glCompileShader(vertexShader);
    // Check for vertex shader compilation errors.
    int success;
    char infoLog[512];
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(vertexShader, 512, NULL, infoLog);
        printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n");
    }
    // Compile fragment shader.
    fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
    glCompileShader(fragmentShader);
    // Check for fragment shader compile errors.
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(fragmentShader, 512, NULL, infoLog);
        printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n");
    }
    // Link and delete shaders.
    shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glLinkProgram(shaderProgram);
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);
    //Separating the Vertex Buffer Objects.
    GLuint VBOs[2];
    glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices1), vertices1, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices2), vertices2, GL_STATIC_DRAW);
    // Specify a VertexArrayObject for each mesh.
    GLuint VAOs[2];
    glGenVertexArrays(2, VAOs);
    glBindVertexArray(VAOs[0]);
    glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    glBindVertexArray(VAOs[1]);
    glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);
    // Bind the VAO before the draw call.
    glBindVertexArray(VAOs[0]);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
    glBindVertexArray(VAOs[1]);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 24);
    // Displaying the figures.
    glutDisplayFunc(display);
    glutMainLoop();
}
You missed generating the vertex buffer object names:
//Separating the Vertex Buffer Objects.
GLuint VBOs[2];
glGenBuffers(2, VBOs); // <----
You have to bind the vertex array before drawing the mesh in the display function:
GLuint VAOs[2];

void display(void) {
    // Background color.
    glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(shaderProgram);
    GLint uniform = glGetUniformLocation(shaderProgram, "figColor");
    glUniform3f(uniform, 1.0f, 0.0f, 0.0f);
    glBindVertexArray(VAOs[0]); // <---
    glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
    glUniform3f(uniform, 0.0f, 0.749f, 1.0f);
    glBindVertexArray(VAOs[1]); // <----
    glDrawArrays(GL_TRIANGLE_FAN, 0, 6);
    glUseProgram(0);
    glFlush();
}
int main(int argc, char** argv) {
    // [...]
    // Specify a VertexArrayObject for each mesh.
    // GLuint VAOs[2]; <---- DELETE
    glGenVertexArrays(2, VAOs);
    // [...]
    // DELETE
    /*
    // Bind the VAO before the draw call.
    glBindVertexArray(VAOs[0]);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
    glBindVertexArray(VAOs[1]);
    glDrawArrays(GL_TRIANGLE_FAN, 0, 24);
    */
    // [...]
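As a side note on why those deleted draw calls never showed anything: they ran exactly once, before glutMainLoop ever invoked the display callback, and the glClear at the start of display wiped whatever they produced. The draw calls therefore belong in display, with the matching VAO bound before each one.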

How to translate and rotate a Triangle in OpenGL over time?

I am trying to rotate and translate my single triangle over time. I have already written the main.cpp and I have written separate files for my Vertex and Fragment shader source code.
Here is the code in my main.cpp file:
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow *window);

// Shaders
const char *vertexShaderSource =
    "#version 410\n"
    "in vec3 vp;\n"
    "void main()\n"
    "{\n"
    "gl_Position = vec4(aPos, 1.0);\n"
    "}\0";
const char *fragmentShaderSource =
    "#version 410\n"
    "out vec4 FragColor;\n"
    "in vec3 myColor;\n"
    "void main()\n"
    "{\n"
    "FragColor = vec4(myColor, 1.0f);\n"
    "}\n\0";
int main()
{
    // start GL context and O/S window using the GLFW helper library
    if (!glfwInit())
    {
        fprintf(stderr, "ERROR: could not start GLFW3\n");
        return 1;
    }
    // uncomment these lines if on Apple OS X
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(640, 480, "LearnOpenGL", NULL, NULL);
    if (!window)
    {
        fprintf(stderr, "ERROR: could not open window with GLFW3\n");
        glfwTerminate();
        return 1;
    }
    glfwMakeContextCurrent(window);
    // start GLEW extension handler
    glewExperimental = GL_TRUE;
    glewInit();
    // get version info
    const GLubyte* renderer = glGetString(GL_RENDERER);
    const GLubyte* version = glGetString(GL_VERSION);
    printf("Renderer: %s\n", renderer);
    printf("OpenGL version supported %s\n", version);
    glEnable(GL_DEPTH_TEST); // enable depth-testing
    glDepthFunc(GL_LESS);
    // Draw a single triangle
    float points[] = {
        // positions        // colors
        0.0f,  0.5f, 0.0f,  1.0f, 0.0f, 0.0f,
        0.5f, -0.5f, 0.0f,  0.0f, 1.0f, 0.0f,
       -0.5f, -0.5f, 0.0f,  0.0f, 0.0f, 1.0f
    };
    GLuint VBO = 0;
    glGenBuffers(1, &VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
    // Generate a VAO.
    GLuint VAO = 0;
    glGenVertexArrays(1, &VAO);
    glBindVertexArray(VAO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
    glEnableVertexAttribArray(0);
    // Compile a Vertex Shader
    int vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
    glCompileShader(vertexShader);
    // Compile a fragment shader.
    int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
    glCompileShader(fragmentShader);
    // Link shaders into an executable shader program.
    int shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, fragmentShader);
    glAttachShader(shaderProgram, vertexShader);
    glLinkProgram(shaderProgram);
    // Create another float array to make my triangle fan.
    float points_5_triangles[] = {
        -0.5f, -0.5f, 0.0f,  0.0f, 0.0f, 1.0f,
         0.5f, -0.5f, 0.0f,  0.0f, 1.0f, 0.0f,
         0.0f,  0.5f, 0.0f,  1.0f, 2.0f, 0.5f,
        // Another triangle made from point 1, 3, and 4
        -0.5f,
    };
    // Generate another VBO for my Triangle Fan
    GLuint VBO_5_triangles = 0;
    glGenBuffers(1, &VBO_5_triangles);
    glBindBuffer(GL_ARRAY_BUFFER, VBO_5_triangles);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points_5_triangles), points_5_triangles, GL_STATIC_DRAW);
    // Generate another VAO for my Triangle Fan
    GLuint VAO_5_triangles = 0;
    glGenVertexArrays(1, &VAO_5_triangles);
    glBindVertexArray(VAO_5_triangles);
    glBindBuffer(GL_ARRAY_BUFFER, VBO_5_triangles);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points_5_triangles), points_5_triangles, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
    glEnableVertexAttribArray(0);
    // Drawing the triangles aka render loop
    while (!glfwWindowShouldClose(window))
    {
        processInput(window);
        // wipe the drawing surface clear
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        // Draw Triangle
        glUseProgram(shaderProgram);
        glBindVertexArray(VAO);
        glDrawArrays(GL_TRIANGLES, 0, 3);
        // Draw Triangle Fan
        glBindVertexArray(VAO_5_triangles);
        glDrawArrays(GL_TRIANGLE_FAN, 0, 7);
        // swap buffers and poll IO events
        glfwPollEvents();
        glfwSwapBuffers(window);
    }
    // close GL context and any other GLFW resources
    glfwTerminate();
    return 0;
}
The tutorial I am following does go over transformations, but in its example the triangles have textures as well as shaders. For my purposes, I want to do this without adding textures to my code.
Can someone walk me through how to add two transformations, translation and rotation, to my "single triangle" as shown in this code?
Your triangle having a texture or not has nothing to do with transformations.
You rotate your triangle simply by calculating a transformation matrix, passing it to your vertex shader, and multiplying it with your coordinates.
Your vertex shader should look something like this:
const char *vertexShaderSource =
    "#version 410\n"
    "layout (location = 0) in vec3 vp;\n"
    "uniform mat4 transform;"
    "void main()\n"
    "{\n"
    "   gl_Position = transform * vec4(vp, 1.0);\n"
    "}\0";
I recommend using the GLM library for that. You calculate your matrix and pass it to your shader like this:
auto transformMatrix = glm::rotate( /* your rotation calculation */ );
auto transLoc = glGetUniformLocation(shaderProgram, "transform");
glUniformMatrix4fv(transLoc, 1, GL_FALSE, glm::value_ptr(transformMatrix));
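To animate both transformations over time, one option (a sketch, not from the original answer; it assumes the usual GLM headers glm/gtc/matrix_transform.hpp and glm/gtc/type_ptr.hpp plus a math header for sinf, and runs once per frame inside the render loop) is to rebuild the matrix from glfwGetTime() each frame:

float t = (float)glfwGetTime(); // seconds since GLFW was initialized
glm::mat4 transformMatrix(1.0f);
// slide the triangle back and forth along x
transformMatrix = glm::translate(transformMatrix, glm::vec3(0.5f * sinf(t), 0.0f, 0.0f));
// spin it around the z-axis; the angle is in radians
transformMatrix = glm::rotate(transformMatrix, t, glm::vec3(0.0f, 0.0f, 1.0f));
GLint transLoc = glGetUniformLocation(shaderProgram, "transform");
glUniformMatrix4fv(transLoc, 1, GL_FALSE, glm::value_ptr(transformMatrix));

Because the translation is applied to the matrix before the rotation, the shader effectively rotates the vertices first and then translates them, so the triangle spins in place while sliding.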

Qt FrameBuffer Object doesn't render even vertices

I'm learning to use OpenGL in Qt with the QOpenGLFramebufferObject, and tried to draw a triangle using the following code.
In render():
glUseProgram(m_program);
GLfloat vertices[] = {
    -1.0f, -1.0f, // first
     0.0f, -1.0f, // second
     0.0f,  1.0f  // third
};
unsigned int VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), vertices);
glBindVertexArray(0);
glDisable(GL_DEPTH_TEST);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
And the shaders are in initShader():
const GLchar* vfSource[] = {
    "#version 330 core\n"
    "layout (location = 0) in vec2 aPos;\n"
    "void main()\n"
    "{\n"
    "   gl_Position = vec4(aPos.x, aPos.y, 0.0, 1.0);\n"
    "}\n\0"
};
const GLchar* fsSource[] = {
    "#version 330 core\n"
    "out vec4 FragColor;\n"
    "void main()\n"
    "{\n"
    "   FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
    "}\n\0"
};
Only the first and third vertices rendered correctly. The second vertex was located in the center of my screen.
And if I change vertices[] to

-1.0f, -1.0f, // 1
 0.0f, -1.0f, // 2
 0.0f,  1.0f, // 3
 1.0f,  1.0f, // 4
 1.0f,  0.0f  // 5

and the last line to

glDrawArrays(GL_TRIANGLES, 0, 5);

the output is a triangle produced by the data in lines 1, 3, and 5.
I have no idea what's wrong with this code. Can anyone help me?
If more code is needed, just let me know.
The OpenGL 3.3 Core Profile (which your shaders target) doesn't allow you to draw directly from client memory. The last parameter of glVertexAttribPointer is an offset into the currently bound GL_ARRAY_BUFFER; setting it to something other than zero should trigger a GL_INVALID_OPERATION when no GL_ARRAY_BUFFER is bound.
In order to get your example working, you need to generate a Vertex Buffer Object (VBO) and attach it to the VAO:
GLuint vbo = 0; // buffer object that will own the vertex data
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(float), 0);
glBindVertexArray(0);
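With that in place, the draw path in render() needs only the VAO (a short sketch using the names above; the vertex data now lives in the VBO, so the client-side array is no longer read at draw time):

glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAO); // restores the buffer binding and attribute layout
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);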

Garbled Triangles from VBO/Shader/MVP Matrix using OpenGL 3.3

Drawing with a regular VBO on older GLSL versions is not a problem, but for whatever reason I get this result when using GLSL 3.3: [screenshot of garbled triangles].
It should be drawing a 2x2 plane on each axis.
(Lighter colors are closer to the far plane, darker ones are closer to the near plane.)
One of the major changes with 3.3 was that you have to provide your Model/View/Projection matrices through uniforms, as opposed to using the old built-in ones.
I don't know what I'm doing wrong, but I'm almost certain it is something to do with the Model/View/Projection data. Here is the relevant code.
Main draw method
float r = 0.0f;

void display() {
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set background color to black and opaque
    glClear(GL_COLOR_BUFFER_BIT);         // Clear the color buffer
    glClear(GL_DEPTH_BUFFER_BIT);         // And the depth buffer
    r += 0.001f;
    glUseProgram(program);
    glEnable(GL_DEPTH_TEST);
    GLuint uniformModel = glGetUniformLocation(program, "model");
    GLuint uniformView = glGetUniformLocation(program, "view");
    GLuint uniformProjection = glGetUniformLocation(program, "projection");
    glm::mat4 projection = glm::perspective(70.0f, 1.0f, 0.0f, 16.0f);
    glUniformMatrix4fv(uniformProjection, 1, GL_FALSE, glm::value_ptr(projection));
    glm::vec3 eye = glm::vec3(sin(r*0.33)*5.0f, 5, cos(r*0.33)*5.0f);
    glm::vec3 center = glm::vec3(0.0f, 0.0f, 0.0f);
    glm::vec3 up = glm::vec3(0.0f, 0.0f, 1.0f);
    glm::mat4 view = glm::lookAt(eye, center, up);
    glUniformMatrix4fv(uniformView, 1, GL_FALSE, glm::value_ptr(view));
    glm::mat4 model = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 0.0f, 0.0f));
    glUniformMatrix4fv(uniformModel, 1, GL_FALSE, glm::value_ptr(model));
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glVertexAttribPointer(
        0,
        3,
        GL_FLOAT,
        GL_FALSE,
        0,
        (void*)0
    );
    glDrawElements(GL_TRIANGLES, 18, GL_UNSIGNED_INT, indices);
    glDisableVertexAttribArray(0);
    glDisable(GL_DEPTH_TEST);
    glUseProgram(0);
    glFlush(); // Render now
}
Vertex/Indices Array & Shader
string vert
    = "#version 330 core\n"
      "layout(location = 0) in vec3 vertex;\n"
      "uniform mat4 model;\n"
      "uniform mat4 view;\n"
      "uniform mat4 projection;\n"
      "void main(){\n"
      "    gl_Position = projection * view * model * vec4(vertex,1.0f);\n"
      "}";
string frag
    = "#version 330 core\n"
      "out vec3 color;\n"
      "void main()\n"
      "{\n"
      "    float lin = 1.0 / gl_FragCoord.w;\n"
      "    float depth = (lin - 0.1) / (16.0 - 0.1);\n"
      "    color = vec3(depth,depth,1.0f);\n"
      "}";
float* data = new float[36] {
    -1.0f, -1.0f,  0.0f,
     1.0f, -1.0f,  0.0f,
     1.0f,  1.0f,  0.0f,
    -1.0f,  1.0f,  0.0f,
     0.0f, -1.0f, -1.0f,
     0.0f,  1.0f, -1.0f,
     0.0f,  1.0f,  1.0f,
     0.0f, -1.0f,  1.0f,
    -1.0f,  0.0f, -1.0f,
     1.0f,  0.0f, -1.0f,
     1.0f,  0.0f,  1.0f,
    -1.0f,  0.0f,  1.0f
};
GLuint* indices = new GLuint[18] {
    0, 1, 2,
    0, 3, 2,
    4, 5, 6,
    4, 7, 6,
    8, 9, 10,
    8, 11, 10
};
Init
const int winSize = 1024;

void glInit(int argc, char** argv) {
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE);
    glutInitContextVersion(3, 3);
    //glutInitContextFlags(GLUT_CORE_PROFILE | GLUT_DEBUG);
    glutInitWindowSize(winSize, winSize);
    glutInitWindowPosition(25, 25);
    glutCreateWindow("Loading...");
    glewExperimental = GL_TRUE;
    glewInit();
    glViewport(0, 0, winSize, winSize);
    camera.setPosition(0.0f, 0.0f, 4.0f);
    glGenVertexArrays(1, &vba);
    glBindVertexArray(vba);
    program = compileShader(vert, frag);
    GLuint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, 3*4*3*4, data, GL_STATIC_DRAW);
    glEnable(GL_DEPTH_TEST);
    while (!bExit) {
        string pre;
        pre.assign("Test Program - ");
        pre.append(std::to_string(fps));
        glutSetWindowTitle(pre.c_str());
        frames++;
        display();
    }
}
Turns out I'm an idiot and it was just a typo kind of error.
I declare GLuint vbo in the init method AND at program scope, but the generated buffer name only ever went into the local variable, which shadows the global one, so display() never saw the real VBO at all.
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
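The fix is simply to drop the local declaration so the generated name lands in the file-scope vbo that display() reads (a minimal sketch):

glGenBuffers(1, &vbo); // no local 'GLuint vbo;' here -- fills the global instead
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, 3*4*3*4, data, GL_STATIC_DRAW);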
It's always the little things that cause ridiculous bugs. <_>