"First-chance exception at 0x00000000" What am I doing wrong? - c++

This is my code so far; it's just tutorial code from open.gl:
#include <GL/GL.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <thread>
#define GLEW_STATIC

int main()
{
    //printf("%s\n", glGetString(GL_EXTENSIONS))
    glewExperimental = GL_TRUE;
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
    GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr); // Windowed
    //GLFWwindow* window = glfwCreateWindow(1920, 1080, "OpenGL", glfwGetPrimaryMonitor(), nullptr); // fullscreen
    //typedef void (*GENBUFFERS) (GLsizei, GLuint*);
    //GENBUFFERS glGenBuffers = (GENBUFFERS)glfwGetProcAddress("glGenBuffers");
    printf("%s\n", glGetString(GL_EXTENSIONS));
    glfwMakeContextCurrent(window);
    glewInit();
    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    printf("%u\n", vertexBuffer);
    float vertices[] = {
         0.0f,  0.5f, // Vertex 1 (X, Y)
         0.5f, -0.5f, // Vertex 2 (X, Y)
        -0.5f, -0.5f  // Vertex 3 (X, Y)
    };
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    //glfwSetWindowShouldClose(window, GL_FALSE);
    while (!glfwWindowShouldClose(window))
    {
        glfwSwapBuffers(window);
        glfwPollEvents();
        if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
            glfwSetWindowShouldClose(window, GL_TRUE);
    }
    glfwTerminate();
}
This returns the exception above... what is going on and what's wrong?
The error occurs at glGenBuffers(1, &vertexBuffer);.
I'm still very new to OpenGL programming, so please break it down into very small pieces for me if you can :)

An error occurring at location 0 suggests you're trying to call a function using a null function pointer. The culprit in your case seems to be glGenBuffers. It's been in the core for a long time, so it's almost certainly supported by your machine. It probably just hasn't been initialised properly.
You can get libraries such as GLEW (GL Extension Wrangler) to help you with that. It's not something you should rely on long-term, but can definitely help you get started without worrying about lots of details.
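For reference, a minimal sketch of an initialisation order that works with GLEW and GLFW 3, trimmed down from the question's own code (the key points: GLEW_STATIC must be defined before glew.h is included when linking statically, glew.h must come before any other GL header, and a context must be current before glewInit() runs):
#define GLEW_STATIC            // define before including glew.h when linking statically
#include <GL/glew.h>           // glew.h must be included before other GL headers
#include <GLFW/glfw3.h>
int main()
{
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr);
    glfwMakeContextCurrent(window); // the context must be current...
    glewExperimental = GL_TRUE;
    glewInit();                     // ...before GLEW can load the function pointers
    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer); // only safe after glewInit() has succeeded
    return 0;
}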
Alternatively, a very quick-and-dirty solution may be just to call glGenBuffersARB() instead. Again, that's definitely not something to rely on long-term though!
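In the same quick-and-dirty spirit, you can also load the entry point by hand through GLFW, which is what the commented-out lines in the question were going for; a sketch, assuming the context is already current (the typedef name is mine):
typedef void (*GENBUFFERSPROC)(GLsizei, GLuint*);
// glfwGetProcAddress returns NULL if no context is current or the function is unavailable
GENBUFFERSPROC myGenBuffers = (GENBUFFERSPROC)glfwGetProcAddress("glGenBuffers");
if (myGenBuffers)
{
    GLuint vertexBuffer;
    myGenBuffers(1, &vertexBuffer);
}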
These questions may help:
glGenBuffers not defined?
OpenGL: How to check if the user supports glGenBuffers()?

Related

Do I need to use a shader for OpenGL 3.3 Core?

I am trying to make a triangle, and I'm using GLEW, GLFW, and GLM alongside OpenGL.
Here's the code I have:
#include <stdlib.h>
#include <stdio.h>
#include <gl\glew.h>
#include <GLFW\glfw3.h>
#include <glm\glm.hpp>

using namespace glm;

int main()
{
    if (!glfwInit())
    {
        fprintf(stderr, "Failed to initialize GLFW\n");
        return -1;
    }
    glfwWindowHint(GLFW_SAMPLES, 4); // 4x antialiasing
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // We want OpenGL 3.3
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); // We don't want the old OpenGL

    // Open a window and create its OpenGL context
    GLFWwindow* window; // (In the accompanying source code, this variable is global)
    window = glfwCreateWindow(1024, 768, "Tutorial 01", NULL, NULL);
    if (window == NULL)
    {
        fprintf(stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window); // Initialize GLEW
    glewExperimental = true; // Needed in core profile
    if (glewInit() != GLEW_OK)
    {
        fprintf(stderr, "Failed to initialize GLEW\n");
        return -1;
    }

    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    // An array of 3 vectors which represents 3 vertices
    static const GLfloat g_vertex_buffer_data[] = {
        -1.0f, -1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
         0.0f,  1.0f, 0.0f,
    };

    // This will identify our vertex buffer
    GLuint vertexbuffer;
    // Generate 1 buffer, put the resulting identifier in vertexbuffer
    glGenBuffers(1, &vertexbuffer);
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);

    do {
        // 1st attribute buffer : vertices
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
            0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
            3,        // size
            GL_FLOAT, // type
            GL_FALSE, // normalized?
            0,        // stride
            (void*)0  // array buffer offset
        );
        // Draw the triangle!
        glDrawArrays(GL_TRIANGLES, 0, 3); // Starting from vertex 0; 3 vertices total -> 1 triangle
        glDisableVertexAttribArray(0);
        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } // Check if the ESC key was pressed or the window was closed
    while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
           glfwWindowShouldClose(window) == 0);
}
Edit
The code is not displaying the white triangle; it just opens the window and shows an empty black screen.
Also, at first I added the code I had been messing around with, which was missing part of it. However, it still does not produce the white triangle.
Do I need to use a shader for OpenGL 3.3?
You need to use a shader or you won't see anything. If you do see something, that is unspecified behavior, which depends highly on each individual driver.
You could try fiddling with the clear color and clearing the screen:
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
The tutorial was able to get a white triangle on a black background. On my computer the opposite was the case.
The tutorial also points out:
If you’re lucky, you can see the result (don’t panic if you don’t)
So read the rest of the tutorial; it also includes a shader.
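For reference, a minimal sketch of the kind of shader the tutorial goes on to add, matching the 3.3 core context above (identifier names are mine, not the tutorial's):
const char* vs_src =
    "#version 330 core\n"
    "layout(location = 0) in vec3 pos;\n"
    "void main() { gl_Position = vec4(pos, 1.0); }\n";
const char* fs_src =
    "#version 330 core\n"
    "out vec4 color;\n"
    "void main() { color = vec4(1.0); }\n"; // solid white
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vs_src, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fs_src, NULL);
glCompileShader(fs);
GLuint program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glUseProgram(program); // call before glDrawArrays
Note that layout(location = 0) matches the 0 passed to glVertexAttribPointer in the code above.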

OpenGL and GLFW - Do I need shaders to draw a point?

I am working through a book on OpenGL, and I am stuck on the third chapter. The following code should open a window and draw a single dot in the center.
The window shows up, and glClearColor and glClear seem to be working, but there is no dot. This answer leads me to believe I may need to compile and link simple vertex and fragment shaders.
The book has made no mention of them, but do I need shaders? Am I missing something else with how I am drawing the dot? Something with the window? The book uses some WIN32 thing for windowing; I am using Linux Mint 17.1.
Code below:
#include <stdlib.h>
#include <stdio.h>
#include <GL/glew.h>
#include <GL/gl.h>
#include <GLFW/glfw3.h>
#include <GL/glut.h>

#define BUFFER_OFFSET(i) ( (char*)NULL + (i) )

static void error_callback(int error, const char* description) {
    fputs(description, stderr);
}

static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

int main(void) {
    GLFWwindow* window;
    glfwSetErrorCallback(error_callback);
    if (!glfwInit()) {
        exit(EXIT_FAILURE);
    }
    window = glfwCreateWindow(640, 480, "Simple example", NULL, NULL);
    if (!window) {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwMakeContextCurrent(window);
    glfwSwapInterval(1);
    glfwSetKeyCallback(window, key_callback);
    if (glewInit()) {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);

    float ratio;
    int width, height;
    glfwGetFramebufferSize(window, &width, &height);
    ratio = width / (float) height;
    glViewport(0, 0, width, height);

    GLfloat vertex [] = {0.0f, 0.0f, -2.0f};
    GLuint m_vertexBuffer;
    glGenBuffers(1, &m_vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, m_vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 3, &vertex[0], GL_STATIC_DRAW);

    while (!glfwWindowShouldClose(window)) {
        glClear(GL_COLOR_BUFFER_BIT);
        glBindBuffer(GL_ARRAY_BUFFER, m_vertexBuffer);
        glVertexPointer(3, GL_FLOAT, 0, BUFFER_OFFSET(0));
        glPointSize(10.0f);
        glEnableClientState(GL_VERTEX_ARRAY);
        glDrawArrays(GL_POINTS, 0, 1);
        glDisableClientState(GL_VERTEX_ARRAY);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}
Also, I am compiling with g++ main.cpp -lGL -lglfw -lGLU -lGLEW
I have OpenGL 4.5 installed, but am opening a fixed-pipeline context.
From your code:
GLfloat vertex [] = {0.0f, 0.0f, -2.0f};
Since you use the fixed function pipeline and never set any matrices, both the projection and the modelview matrix will be left at the default, and the point (0, 0, -2) is outside of the viewing volume. Try (0, 0, 0) instead.
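In other words, with both matrices at identity, clip space and normalized device coordinates coincide, so only points inside [-1, 1] on every axis survive clipping:
GLfloat vertex [] = {0.0f, 0.0f, 0.0f}; // inside the default [-1, 1] volume; z = -2.0f gets clipped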
If the book uses exactly this code with exactly those vertex coordinates, you'd better search for another one. You should also consider learning "modern" GL instead. The fixed-function pipeline has basically been superseded by shaders since GL 2.0, a decade ago.

OpenGL GLFW window closes as soon as it opens

I have a problem seeing my triangle in a black window. The window just closes as soon as it opens and does not allow me to see what's going on inside it. I have seen somewhere on the net that it has something to do with the minor versioning, which I have no clue how to check on my VGA card.
Here is my full code:
#define GLEW_STATIC
#include <stdio.h>
#include <GL\glew.h>
#include <GL\GLU.h>
#include <GL\glut.h>
#include <glm.hpp>
#include <GL\gl.h>
#include <GLFW\glfw3.h>

using namespace glm;
using namespace std;

int main()
{
    glfwWindowHint(GLFW_SAMPLES, 4); // anti aliasing
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // openGL major version to be 3
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0); // minor set to 3, which makes the version 3.3
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // for MAC OS only
    glfwWindowHint(GLFW_OPENGL_COMPAT_PROFILE, GLFW_OPENGL_CORE_PROFILE); // avoid using old openGL

    GLFWwindow* window;
    window = glfwCreateWindow(1024, 768, "First Window in OpenGL", NULL, NULL);
    if (window == NULL)
    {
        fprintf(stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n");
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    static const GLfloat g_vertex_buffer_data[] = {
        -1.0f, -1.0f, 0.0f,
         1.0f, -1.0f, 0.0,
         0.0f,  1.0f, 0.0f
    };

    // identifying our vertex buffer
    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    // The following commands will talk about our 'vertexbuffer' buffer
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    // Give our vertices to OpenGL.
    glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);

    glewExperimental = true; // Needed in core profile
    if (glewInit() != GLEW_OK)
    {
        fprintf(stderr, "Failed to initialize GLEW\n");
        return -1;
    }
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    do {
        // 1st attribute buffer : vertices
        glEnableVertexAttribArray(0);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glVertexAttribPointer(
            0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
            3,        // size
            GL_FLOAT, // type
            GL_FALSE, // normalized?
            0,        // stride
            (void*)0  // array buffer offset
        );
        glDrawArrays(GL_TRIANGLES, 0, 3); // Starting from vertex 0; 3 vertices total -> 1 triangle
        glDisableVertexAttribArray(0);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
           glfwWindowShouldClose(window) == 0);

    return 0;
}
Can anyone please help me keep the window open and have it close only on the ESC key, as expected from the code?
The reason the window closes immediately is that you have a segmentation fault.
This is most likely due to failing to initialize things in the proper order.
Initialize GLFW before calling any other GLFW functions, like so:
// Initialise GLFW
if (!glfwInit()) {
    fprintf(stderr, "Failed to initialize GLFW\n");
    return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4); // anti aliasing
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // openGL major version to be 3
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0); // minor set to 3, which makes the version 3.3
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // for MAC OS only
glfwWindowHint(GLFW_OPENGL_COMPAT_PROFILE, GLFW_OPENGL_CORE_PROFILE); // avoid using old openGL

GLFWwindow* window;
window = glfwCreateWindow(1024, 768, "First Window in OpenGL", NULL, NULL);
Then, move the initialization of GLEW to right after you create and set the GL context:
if (window == NULL) {
    fprintf(stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n");
    glfwTerminate();
    return -1;
}
glfwMakeContextCurrent(window);

glewExperimental = GL_TRUE; // Needed in core profile
const GLenum err = glewInit();
if (err != GLEW_OK) {
    fprintf(stderr, "Failed to initialize GLEW\n");
    fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
    return -1;
}

GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
// ... rest of code follows
I have tested it; your code should work fine now, and the window won't close until you hit ESC.
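As an aside, installing a GLFW error callback before anything else makes this kind of ordering mistake much easier to spot; a small sketch (the callback name is arbitrary):
static void error_callback(int error, const char* description)
{
    fprintf(stderr, "GLFW error %d: %s\n", error, description);
}
// glfwSetErrorCallback is one of the few GLFW functions that may be
// called before glfwInit(), so install it first.
glfwSetErrorCallback(error_callback);
if (!glfwInit())
    return -1;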

Why does this crash when using OpenGL core profile?

When I try to run this simple OpenGL test program I get a segmentation fault. This only happens when I create the context using the core profile flag. If I use the compatibility profile flag, the program runs without issue.
Edit: I checked the pointer to the function glGenVertexArrays and it returned NULL. If glfwCreateWindow doesn't return NULL, glGetString(GL_VERSION) confirms that the context is version 4.3, and glewInit returns GLEW_OK, then why is glGenVertexArrays == NULL?
My OS is Windows 7 64-bit and my GPU is a Nvidia GTX 760 with 331.82 WHQL driver.
Code:
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>

#define GLSL(src) "#version 430 core\n" #src

void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

GLuint create_program(const char* vertex_source, const char* fragment_source)
{
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vs, 1, &vertex_source, NULL);
    glCompileShader(vs);
    unsigned int fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fs, 1, &fragment_source, NULL);
    glCompileShader(fs);
    GLuint shader_program = glCreateProgram();
    glAttachShader(shader_program, fs);
    glAttachShader(shader_program, vs);
    glLinkProgram(shader_program);
    return shader_program;
}

const char* vertex_shader = GLSL(
    layout(location = 0) in vec3 vertex_position;
    void main()
    {
        gl_Position = vec4(vertex_position, 1.0);
    }
);

const char* fragment_shader = GLSL(
    out vec4 frag_color;
    void main()
    {
        frag_color = vec4(1.0, 0.0, 0.0, 1.0);
    }
);

int main(int argc, char* argv[])
{
    if (!glfwInit())
        exit(EXIT_FAILURE);
    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    // if we set GLFW_OPENGL_PROFILE to GLFW_OPENGL_CORE_PROFILE
    // instead of GLFW_OPENGL_COMPAT_PROFILE the program will
    // segfault at line 98, call to glGenVertexArrays
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(512, 512, "OpenGL", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwSetKeyCallback(window, key_callback);
    glfwMakeContextCurrent(window);
    GLenum glewError = glewInit();
    if (glewError != GLEW_OK)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    printf("OpenGL Version: %s\n\n", glGetString(GL_VERSION));

    float position[] =
    {
         1.0f,  1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f,
         1.0f, -1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f
    };
    unsigned short indices[] =
    {
        1, 0, 2,
        3, 1, 2
    };

    GLuint vao = 0;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);
    GLuint index_buffer = 0;
    GLuint vertex_buffer = 0;
    glGenBuffers(1, &index_buffer);
    glGenBuffers(1, &vertex_buffer);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), &indices, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(position), &position, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);

    GLuint shader_program = create_program(vertex_shader, fragment_shader);
    glUseProgram(shader_program);

    while (!glfwWindowShouldClose(window))
    {
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, NULL);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}
You're actually getting an Invalid Enum [1280] error from OpenGL after you call glewInit(). The easiest fix is to set
glewExperimental = GL_TRUE;
before you call glewInit():
glewExperimental = GL_TRUE;
GLenum glewError = glewInit();
if (glewError != GLEW_OK)
{
    glfwTerminate();
    exit(EXIT_FAILURE);
}
Why? Well, it has to do with how GLEW loads extensions, functions, etc. By default GLEW will mark some functions as unsupported; to get around that you need to set glewExperimental = GL_TRUE;, or else it will generate an error like the one you were getting.
Experimental Drivers
GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.
Source
Extra
Always remember to check for OpenGL errors; they usually tell you what's wrong and/or help you find the problem.
GLenum error = glGetError();
if (error != GL_NO_ERROR)
{
    std::cout << "OpenGL Error: " << error << std::endl;
}
You can read about the different errors here.
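Since glGetError only reports one error flag per call, it can be worth draining the whole queue in a small helper; a sketch along the lines of the snippet above (the helper name is mine):
#include <iostream>
// Report every pending OpenGL error; returns true if any occurred.
static bool check_gl_errors(const char* where)
{
    bool had_error = false;
    for (GLenum error = glGetError(); error != GL_NO_ERROR; error = glGetError())
    {
        std::cout << "OpenGL Error at " << where << ": " << error << std::endl;
        had_error = true;
    }
    return had_error;
}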
Output the value of glGenVertexArrays. It is likely 0 or null right now. I've found I have to set up certain functions first, even in the core profile.
I've encountered similar issues with GLEW not properly initializing all the function pointers if you create a core profile. I've been meaning to look into the GLEW implementation to see why this is the case, but haven't gotten around to it, since the compatibility context is serving me OK.
However, one possibility you could try is to use GLFW to create a compatibility context, initialize GLEW, destroy the GLFW window, and create a new one with the core context.
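A sketch of that workaround (untested here, and it relies on the driver handing back the same function pointers for both contexts, which is common on Windows but not guaranteed by any spec):
glfwInit();
glfwWindowHint(GLFW_VISIBLE, GL_FALSE); // keep the throwaway window hidden
GLFWwindow* dummy = glfwCreateWindow(64, 64, "dummy", NULL, NULL); // default (compatibility) context
glfwMakeContextCurrent(dummy);
glewInit();                             // load the function pointers against the compat context
glfwDestroyWindow(dummy);
glfwDefaultWindowHints();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(512, 512, "OpenGL", NULL, NULL);
glfwMakeContextCurrent(window);         // reuse the pointers GLEW already loaded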

How do I create an OpenGL 3.3 context in GLFW 3

I've written a simple OpenGL 3.3 program which is supposed to render a triangle, based on this tutorial, except I'm using GLFW to create the window and context instead of doing it from scratch. Also, I'm using Ubuntu.
The triangle doesn't render, though; I just get a black screen. Functions like glClearColor() and glClear() seem to be working exactly as they should, but the code to render the triangle doesn't. Here are the relevant bits of it:
#define GLFW_INCLUDE_GL_3
#include <GL/glew.h>
#include <GLFW/glfw3.h>

int main()
{
    glfwInit();
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(800, 600, "GLFW test", NULL, NULL);
    glfwMakeContextCurrent(window);
    glewExperimental = GL_TRUE;
    glewInit();

    float vertices[] = {-0.5f, -0.5f, 0.0f, 0.5f, 0.5f, -0.5f};
    GLuint VBOid[1];
    glClear(GL_COLOR_BUFFER_BIT);
    glGenBuffers(1, VBOid);
    glBindBuffer(GL_ARRAY_BUFFER, VBOid[0]);
    glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), vertices, GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, VBOid[0]);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    ...
}
What am I missing?
In core profile OpenGL 3.3, you need a shader in order to render anything, so you need to compile and link a program that contains a vertex and a fragment shader.
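A hedged sketch of the missing pieces (identifier names are mine): besides the shader program, a strict core profile also requires a vertex array object to be bound before the attribute setup and draw calls:
// after glewInit(), before the buffer/attribute code:
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao); // core profile needs a bound VAO for glVertexAttribPointer/glDrawArrays
// then compile and link a trivial #version 330 core vertex/fragment pair,
// e.g. with a helper like the create_program() shown in the previous question:
GLuint program = create_program(vs_src, fs_src); // vs_src/fs_src: your 330-core shader sources
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 3);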