This is my code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <vector>
using namespace std;
const int IMAGE_SIZE = 32;
GLFWwindow* window;
GLuint vao;
GLuint vbo;
int initWindow() {
// Initialize GLFW
if (!glfwInit()) {
cerr << "Error: Failed to initialize GLFW." << endl;
return 1;
}
// Set up GLFW window hints for OpenGL 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Create window
window = glfwCreateWindow(IMAGE_SIZE * 20, IMAGE_SIZE * 20 + 60, "PBM Image", NULL, NULL);
if (!window) {
cerr << "Error: Failed to create GLFW window." << endl;
glfwTerminate();
return 1;
}
// Create context
glfwMakeContextCurrent(window);
// Set color and blendmode
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Create and bind VAO and VBO
glGenVertexArrays(1, &vao);
glBindVertexArray(vao); // Here is the spot where I get the exception
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
return 0;
}
void renderImage(vector<unsigned char> vertices) {
// Update VBO data
glBufferData(GL_ARRAY_BUFFER, vertices.size(), vertices.data(), GL_STATIC_DRAW);
// Set vertex attribute pointers
glVertexAttribPointer(0, 3, GL_UNSIGNED_BYTE, GL_FALSE, 3 * sizeof(unsigned char), (void*)0);
glEnableVertexAttribArray(0);
// Unbind VBO and VAO
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
// Set up projection matrix
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, IMAGE_SIZE, 0.0, IMAGE_SIZE, 0.0, 1.0);
// Clear screen
glClear(GL_COLOR_BUFFER_BIT);
// Bind VAO
glBindVertexArray(vao);
// Draw points
glDrawArrays(GL_POINTS, 0, vertices.size() / 3);
// Unbind VAO
glBindVertexArray(0);
// Swap buffers
glfwSwapBuffers(window);
}
int main() {
if (initWindow()) return 1;
// Here comes the code that generates vector<unsigned char> vertices
renderImage(vertices);
getchar();
// Delete VAO and VBO
glDeleteVertexArrays(1, &vao);
glDeleteBuffers(1, &vbo);
// Terminate GLFW
glfwTerminate();
return 0;
}
It uses GLEW to set up OpenGL and GLFW for window handling. After initializing the window it generates the vector<unsigned char> vertices that renderImage takes in. Afterwards it deletes the VAO and VBO and terminates GLFW. But it never even gets to that point, because it throws the following exception at glBindVertexArray(vao):
Exception thrown at 0x0000000000000000 in generator.exe: 0xC0000005: Access violation executing location 0x0000000000000000.
Here are my lib and include settings and folders (screenshots: VC++ Directories, Input, Lib folder, Lib x64 folder, GL include folder, GLFW include folder).
GLEW needs to be initialized (see Initializing GLEW). If GLEW is not initialized, the OpenGL API function pointers are not set, so the first call through one of them (here glBindVertexArray) jumps to address 0 and triggers exactly this access violation. Call glewInit() right after glfwMakeContextCurrent(window):
glfwMakeContextCurrent(window);
if (glewInit() != GLEW_OK)
return 1;
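For completeness, here is a minimal sketch of the corrected initialization order, reusing the globals from the question (only the part after window creation is shown):
glfwMakeContextCurrent(window);
// Load the OpenGL function pointers before making any gl* call.
glewExperimental = GL_TRUE; // helps with core profiles on some drivers
if (glewInit() != GLEW_OK) {
    cerr << "Error: Failed to initialize GLEW." << endl;
    glfwTerminate();
    return 1;
}
// Only now is it safe to touch OpenGL state and create the VAO/VBO.
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);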
Related
I am using GLEW and GLFW and I am repeatedly getting this error:
Exception thrown at 0x0819FF06 in GLTest.exe: 0xC0000005: Access violation reading location 0x00000000.
I'm pretty sure that I have correctly initialized GLEW (which seems to be what's wrong when most people get this error).
bool Game_Window::CreateWindow()
{
if (glfwInit() != true)
return false;
glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
m_window = glfwCreateWindow(m_width, m_height, m_title, NULL, NULL);
glfwMakeContextCurrent(m_window);
glewExperimental = true;
if (glewInit() != GLEW_OK)
return false;
glClearColor(0.3f, 0.3f, 0.3f, 1.0f);
std::cout << glGetString(GL_VERSION) << std::endl;
return true;
}
Here is the code I'm using to draw it:
#include "Game_Window.h"
#include "Shader.h"
float verticies[] =
{
-0.5f,-0.5f,0.0f,
0.5f,-0.5f,0.0f,
0.0f,0.5f,0.0f,
};
GLuint indecies[] =
{
0,1,2,
};
int main()
{
Game_Window window("Window", 1600, 900);
if (window.CreateWindow())
{
Shader shader("basic.vert", "basic.frag");
shader.CreateShader();
shader.Use();
GLuint VAO, VBO, EBO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(VBO, sizeof(verticies), verticies, GL_STATIC_DRAW);
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indecies), indecies, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float),
(void*)0);
glEnableVertexAttribArray(0);
while (window.ShouldStayOpen())
{
window.Update();
window.clear();
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, 0);
window.swapBuffers();
}
}
return 0;
}
I am quite confused by this error, as it also seems to occur on random GLFW functions (but I'm not sure, as I can't get it to happen consistently). I don't get any error until I add:
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, 0);
The 1st parameter of glBufferData has to be the target enumeration constant (e.g. GL_ARRAY_BUFFER), not the named buffer object.
Change your code like this to solve the issue:
glBufferData(
GL_ARRAY_BUFFER, // GL_ARRAY_BUFFER instead of VBO
sizeof(verticies), verticies, GL_STATIC_DRAW);
Note: if you checked for OpenGL errors (glGetError), you would get a GL_INVALID_ENUM error in this case.
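For illustration, a small helper along these lines (a sketch, not part of the original code; the name checkGLError is made up) makes such mistakes visible right where they happen:
#include <iostream>
// Drains the OpenGL error queue and prints each error with a caller-supplied label.
void checkGLError(const char* where)
{
    for (GLenum err = glGetError(); err != GL_NO_ERROR; err = glGetError())
        std::cerr << "OpenGL error 0x" << std::hex << err << " after " << where << std::endl;
}
// Usage with the original (buggy) call:
glBufferData(VBO, sizeof(verticies), verticies, GL_STATIC_DRAW);
checkGLError("glBufferData"); // would print 0x500 (GL_INVALID_ENUM)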
I am trying to make a triangle and I'm using GLEW, GLFW and GLM as extensions of OpenGL.
Here's the code I have:
#include <stdlib.h>
#include <stdio.h>
#include <gl\glew.h>
#include <GLFW\glfw3.h>
#include <glm\glm.hpp>
using namespace glm;
int main()
{
if (!glfwInit())
{
fprintf(stderr, "Failed to initialize GLFW\n");
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4); // 4x antialiasing
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); // We want OpenGL 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE); //We don't want the old OpenGL
// Open a window and create its OpenGL context
GLFWwindow* window; // (In the accompanying source code, this variable is global)
window = glfwCreateWindow(1024, 768, "Tutorial 01", NULL, NULL);
if (window == NULL)
{
fprintf(stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n");
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window); // Initialize GLEW
glewExperimental = true; // Needed in core profile
if (glewInit() != GLEW_OK)
{
fprintf(stderr, "Failed to initialize GLEW\n");
return -1;
}
// Ensure we can capture the escape key being pressed below
glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
// This will identify our vertex buffer
GLuint vertexbuffer;
// Generate 1 buffer, put the resulting identifier in vertexbuffer
glGenBuffers(1, &vertexbuffer);
// The following commands will talk about our 'vertexbuffer' buffer
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
// Give our vertices to OpenGL.
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
do {
// 1rst attribute buffer : vertices
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, // attribute 0. No particular reason for 0, but must match the layout in the shader.
3, // size
GL_FLOAT, // type
GL_FALSE, // normalized?
0, // stride
(void*)0 // array buffer offset
);
// Draw the triangle !
glDrawArrays(GL_TRIANGLES, 0, 3); // Starting from vertex 0; 3 vertices total -> 1 triangle
glDisableVertexAttribArray(0);
// Swap buffers
glfwSwapBuffers(window);
glfwPollEvents();
} // Check if the ESC key was pressed or the window was closed
while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
glfwWindowShouldClose(window) == 0);
}
Edit
The code is not displaying the white triangle; it just opens the window and shows an empty black screen.
Also, at first I posted the code I had messed around with, which was missing part of the code. However, it still does not produce the white triangle.
Do I need to use a shader for OpenGL 3.3?
You need to use a shader or you won't see anything. If you do see something, that is unspecified behavior, which depends highly on each individual driver.
You could try fiddling with the clear color and clear the screen:
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
The tutorial was able to get a white triangle on a black background. On my computer the opposite was the case.
The tutorial also points out:
If you're lucky, you can see the result (don't panic if you don't)
So read the rest of the tutorial; it also includes a shader.
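For reference, a minimal GLSL 3.30 core shader pair that would draw the triangle in plain white could look like this (a sketch only; the tutorial's own shaders may differ):
// Vertex shader: passes the attribute at location 0 straight through.
const char* vertexSrc = R"(#version 330 core
layout(location = 0) in vec3 position;
void main() { gl_Position = vec4(position, 1.0); }
)";
// Fragment shader: outputs a constant white color.
const char* fragmentSrc = R"(#version 330 core
out vec4 color;
void main() { color = vec4(1.0); }
)";
These sources still have to be compiled with glCreateShader/glShaderSource/glCompileShader, linked into a program with glCreateProgram/glAttachShader/glLinkProgram, and activated with glUseProgram before the glDrawArrays call.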
I am running a basic OpenGL program that compiles and runs fine from the command prompt with the command
g++ gltest.cpp -std=c++11 -lglew32s -lglfw3 -lgdi32 -lopengl32 -o gltest.exe
but when I try to build it in NetBeans it gives me a bunch of "undefined reference to `_imp____glewCreateShader'" errors, all of them references to GLEW functions.
I have the needed libraries set in the linker options.
here is the code:
// g++ gldrop.cpp -std=c++11 -lglew32s -lglfw3 -lgdi32 -lopengl32 -o gldrop.exe
#define GLEW_STATIC
#include <iostream>
#include "gl\glew.h"
#include "gl\glfw3.h"
#include "shaderM.cpp"
#include "glm\glm\gtc\matrix_transform.hpp"
#include "glm\glm\glm.hpp"
#include "data.cpp"
GLFWwindow* window;
int main(int argv, char** argc)
{
//initialize GLFW
if (!glfwInit())
{
std::cout << "Failed to initialize GLFW \n";
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//create GLFW window
window = glfwCreateWindow(640, 480, "GL", NULL, NULL);
glfwMakeContextCurrent(window);
glewExperimental = true;
//initialize GLEW
if(glewInit() != GLEW_OK)
{
std::cout << "failed to initialize glew";
return -1;
}
glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
//background
glClearColor(0.4f, 0.3f, 0.7f, 0.0f);
//depth test
glEnable(GL_DEPTH_TEST);
//takes fragments closer to the camera
glDepthFunc(GL_LESS);
//Vertex array object
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
//Vertex buffer
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
//color buffer
GLuint colorbuffer;
glGenBuffers(1, &colorbuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
//create shaders and attach them to a program object
GLuint program = rigShadersToProgram();
GLuint matrixID = glGetUniformLocation(program, "MVP");
//projection matrix 45 degree FoV, 4:3 ratio, display range 0.1 - 100
glm::mat4 projection = glm::perspective(70.0f, 4.0f/3.0f, 0.1f, 100.0f);
//camera matrix
glm::mat4 view = glm::lookAt(
glm::vec3(4,3,-3), //camera is at (4,3,-3)
glm::vec3(0,0, 0), //looks at origin
glm::vec3(0,1, 0) //head is up
);
//model matrix identity matrix
glm::mat4 model = glm::mat4(1.0f);
//model-view-projection
glm::mat4 MVP = projection * view * model;
while (!glfwWindowShouldClose(window))
{
//clear the screen
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//use the compiled shaders
glUseProgram(program);
//send transformation matrix to currently bound shader
glUniformMatrix4fv(matrixID, 1, GL_FALSE, &MVP[0][0]);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, //index
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
0 //array buffer offset
);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glVertexAttribPointer(
1, //index
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
0 //array buffer offset
);
//draw triangles
glDrawArrays(GL_TRIANGLES, 0, 12*3); //start at vertex 0, 12*3 vertices
glfwSwapBuffers(window);
glfwPollEvents();
}
glDeleteBuffers(1, &vertexbuffer);
glDeleteBuffers(1, &colorbuffer);
glDeleteProgram(program);
glDeleteVertexArrays(1, &vao);
glfwTerminate();
return 0;
}
Not sure why it won't recognize the libraries.
Where did you download glew32 from?
Maybe you downloaded the Visual C++ version, or if not, you could have unpacked the wrong (64-bit) version.
Another thing I've heard (try this first):
I've had issues with glew32s on MinGW; for some reason it doesn't work, so use regular glew32. It should still work with the preprocessor define and compile statically without any issue.
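If you try that, the build command from the question would presumably change only in the GLEW library name (this is an assumption about the MinGW package layout, not something verified here):
g++ gltest.cpp -std=c++11 -lglew32 -lglfw3 -lgdi32 -lopengl32 -o gltest.exe
When linking against the non-static library, glew32.dll also has to be reachable at run time (for example, placed next to the executable).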
I have the following program, and I cannot figure out why the glColor4f() call is not resulting in a red line from the output of glDrawArrays(). Can anyone tell me what I'm doing wrong? A white line is being output for some reason. Is glColor4f() no longer supported in modern versions of GL? If that's the case, what's the proper way to color lines like this?
All of my googling led me to find that I should be calling:
glDisable(GL_LIGHTING);
glDisable(GL_TEXTURE_2D);
glEnable(GL_COLOR_MATERIAL);
glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE);
However, after adding those calls I don't see any difference.
#include <cstdlib>
#include <vector>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
int main(int argc, char *argv[]) {
if(!glfwInit()) {
return EXIT_FAILURE;
}
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(800, 600, "Line Intersection", NULL, NULL);
if(!window) {
glfwTerminate();
return EXIT_FAILURE;
}
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if(glewInit() != GLEW_OK) {
glfwTerminate();
return EXIT_FAILURE;
}
if(!GLEW_VERSION_3_2) {
glfwTerminate();
return EXIT_FAILURE;
}
static const glm::vec3 arr[] = {
glm::vec3(0,0,0),
glm::vec3(0,0.8f,0)
};
const glm::vec3* first = arr;
const glm::vec3* last = arr + sizeof(arr) / sizeof(arr[0]);
std::vector<glm::vec3> vertexBufferData (first, last);
static GLuint vertexBufferId;
glGenBuffers(1, &vertexBufferId);
glBindBuffer(GL_ARRAY_BUFFER, vertexBufferId);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexBufferData[0]) * vertexBufferData.size(), &vertexBufferData[0], GL_STATIC_DRAW);
GLuint vertexArrayId;
glGenVertexArrays(1, &vertexArrayId);
glBindVertexArray(vertexArrayId);
glDisable(GL_LIGHTING);
glDisable(GL_TEXTURE_2D);
glEnable(GL_COLOR_MATERIAL);
glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE);
while(!glfwWindowShouldClose(window)){
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glColor4f(1.0f,0.0f,0.0f,1.0f);
glDrawArrays(GL_LINES, 0, vertexBufferData.size());
glDisableVertexAttribArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return EXIT_SUCCESS;
}
You've selected a Core profile.
There are no freebies in Core; you must provide a vertex and a fragment shader.
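In a core profile the fixed-function color state that glColor4f writes to simply does not exist, so the line color has to come from your fragment shader. A minimal sketch, assuming a program object named program that you compile and link yourself (together with a matching vertex shader), could look like this:
// Fragment shader source (GLSL 1.50 core): the color comes from a uniform instead of glColor4f.
const char* fragmentSrc = R"(#version 150 core
uniform vec4 lineColor;
out vec4 fragColor;
void main() { fragColor = lineColor; }
)";
// In the render loop, after glUseProgram(program):
GLint loc = glGetUniformLocation(program, "lineColor");
glUniform4f(loc, 1.0f, 0.0f, 0.0f, 1.0f); // the core-profile counterpart of glColor4f(1, 0, 0, 1)
glDrawArrays(GL_LINES, 0, vertexBufferData.size());
A color constant hard-coded in the shader would work just as well; the uniform simply lets you change the color per draw call.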