I have a problem with OpenGL and SDL2.
When I call glGenBuffers, the application crashes.
Does anyone know where I made a mistake?
#define GLEW_STATIC
#include <iostream>
#include <SDL2/SDL.h>
#include <SDL2/SDL_image.h>
#include <GL/glew.h>
using namespace std;
SDL_Window*win;
SDL_Event event;
const Uint8*keystate = SDL_GetKeyboardState(NULL);
SDL_GLContext context;
int main(int argc, char*args[])
{
SDL_Init(SDL_INIT_EVERYTHING);
glewExperimental = GL_TRUE;
glewInit();
win = SDL_CreateWindow("opengl",SDL_WINDOWPOS_CENTERED,SDL_WINDOWPOS_CENTERED,900,800,SDL_WINDOW_OPENGL);
context = SDL_GL_CreateContext(win);
SDL_GL_MakeCurrent(win,context);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
glClearColor( 1.0, 1.0, 1.0, 1.0 );
float positions[6] =
{
-0.5f,-0.5f,
0.0f,0.5f,
0.5f,0.0f
};
GLuint buffer;
glGenBuffers(1,&buffer); //here
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, 6,positions,GL_STATIC_DRAW);
while(true)
{
while(SDL_PollEvent(&event))
{
if(event.type==SDL_QUIT) return 0;
}
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES,0,3);
SDL_GL_SwapWindow(win);
}
}
I really don't know where the problem is. I would be grateful if someone could help me.
The GLEW library has to be initialized by glewInit after the OpenGL context has been made current by SDL_GL_MakeCurrent(win, context). See Initializing GLEW.
win = SDL_CreateWindow("opengl",SDL_WINDOWPOS_CENTERED,SDL_WINDOWPOS_CENTERED,
900,800,SDL_WINDOW_OPENGL);
context = SDL_GL_CreateContext(win);
SDL_GL_MakeCurrent(win,context);
if (glewInit() != GLEW_OK)
return 0;
When you call it before that, the initialization of GLEW fails and glewInit doesn't return GLEW_OK.
Related
I'm following a fairly simple tutorial and all the files compile; I am using OpenGL, GLEW, and GLFW in my current code. The window is not displaying correctly, and I don't think it's even rendering. I have my main.cpp:
#include "stdafx.h"
#include "Libs.h"
int main(int argc, char **argv) {
glfwInit();
const int WINDOW_WIDTH = 640;
const int WINDOW_HEIGHT = 480;
int FRAME_BUFFER_WIDTH = 0;
int FRAME_BUFFER_HEIGHT = 0;
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH, WINDOW_HEIGHT, "Custom Name", NULL, NULL);
glfwGetFramebufferSize(window, &FRAME_BUFFER_WIDTH, &FRAME_BUFFER_HEIGHT);
glViewport(0, 0, FRAME_BUFFER_WIDTH, FRAME_BUFFER_HEIGHT);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (glfwInit() != GLEW_OK) {
std::cout << "GLFW INIT != GL_OK \n";
glfwTerminate();
}
//Main loop
while (!glfwWindowShouldClose(window)) {
glfwPollEvents();
glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
glfwSwapBuffers(window);
glFlush();
}
glfwTerminate();
return 1;
}
and my Libs.h
#pragma once
#include <iostream>
#include <glew.h>
#include <glfw3.h>
#include <glm.hpp>
#include <vec2.hpp>
#include <vec3.hpp>
#include <vec4.hpp>
#include <mat4x4.hpp>
#include <gtc/matrix_transform.hpp>
#include <gtc/type_ptr.hpp>
#include <SOIL2.h>
The whole thing compiles just fine and opens without any errors. I can't see any of the console text that I'm printing (std::cout), since I'm running it from Visual Studio.
The problem is that when it opens, the window is displayed like this: [screenshot of the displayed window]
As you can see, the window is completely blank and there is no green (which there should be). Also, the window title is my solution name and not the one that I gave it ("Custom Name"). Lastly, the icon for the window is a package, which I think means that it's not GL but rather Visual Studio creating it. I have no idea why this is happening, and no errors are occurring.
If anyone has a possible fix that would be great, thanks.
Fixed:
Pretty simple: I had created my project with extra template files already in it. I needed to create it from an empty project instead. Hopefully this can help somebody in the future.
This code is supposedly the code to draw a triangle, but all I get is a black screen. Why am I not getting anything drawn?
Secondly, in my array of vertices, if I put an 'f' after my coordinates like I always see in tutorials, I get an error about an invalid digit in an octal constant. Why can everyone else use 'f' after their numbers and not me?
I am using OpenGL 4.1 on OS X Yosemite.
#include <iostream>
//Using SDL and standard IO
#include <SDL2/SDL.h>
//#define GL_GLEXT_PROTOTYPES 1
//#include <SDL2/SDL_opengl.h>
#include <GLUT/glut.h>
#include <stdio.h>
#include <OpenGL/gl3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <string.h>
using namespace std;
//Screen dimension constants
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
bool SetOpenGLAttributes()
{
// Set our OpenGL version.
// SDL_GL_CONTEXT_CORE gives us only the newer version, deprecated functions are disabled
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
return true;
}
int main( int argc, char* args[] )
{
//The window we'll be rendering to
SDL_Window* window = NULL;
//Initialize SDL
if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
{
printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
}
else
{
SetOpenGLAttributes();
//Create window
window = SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL );
if( window == NULL )
{
printf( "Window could not be created! SDL_Error: %s\n", SDL_GetError() );
}
else
{
//creating new context
SDL_GL_CreateContext(window);
GLuint vertexArrayID;
glGenVertexArrays(1, &vertexArrayID);
glBindVertexArray(vertexArrayID);
printf("%s", "This is your version");
printf("%s\n", glGetString(GL_VERSION));
printf("%s", glGetString(GL_RENDERER));
SDL_GL_SetSwapInterval(1);
glEnable(GL_DEPTH_TEST);
float r = 0.5;
static const GLfloat cubeV[] = {
-.5, 0, 0,
.5, 0, 0,
0, .5, 0
};
GLuint vertexBuffer;
glGenBuffers(1, &vertexBuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(cubeV), cubeV, GL_STATIC_DRAW);
SDL_GL_SwapWindow(window);
bool running = true;
while(running){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(
0, // attribute 0. No particular reason for 0, but must match the layout in the shader.
3, // size
GL_FLOAT, // type
GL_FALSE, // normalized?
0, // stride
(void*)0 // array buffer offset
);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(0);
glFlush();
SDL_GL_SwapWindow(window);
SDL_Delay(17);
}
}
}
//Destroy window
//SDL_DestroyWindow( window );
//Quit SDL subsystems
//SDL_Quit();
return 0;
}
I am trying to use OpenGL with SDL, and whenever I check my version at runtime it always reports that I am using OpenGL version 2.1. To my understanding, including gl3.h gives you the 3.2+ functionality of OpenGL. Beyond that, I am specifically asking for version 4.1 of OpenGL and still apparently running 2.1. Can somebody please tell me what I am doing wrong? I am running OS X Yosemite.
#include <iostream>
//Using SDL and standard IO
#include <SDL2/SDL.h>
#define GL_GLEXT_PROTOTYPES 1
//#include <SDL2/SDL_opengl.h>
#include <GLUT/glut.h>
#include <stdio.h>
#include <OpenGL/gl3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <string.h>
#ifdef __APPLE__
#define glGenVertexArrays glGenVertexArraysAPPLE
#define glBindVertexArray glBindVertexArrayAPPLE
#define glDeleteVertexArrays glDeleteVertexArraysAPPLE
#endif
using namespace std;
//Screen dimension constants
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
bool SetOpenGLAttributes()
{
// Set our OpenGL version.
// SDL_GL_CONTEXT_CORE gives us only the newer version, deprecated functions are disabled
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
return true;
}
int main( int argc, char* args[] )
{
//The window we'll be rendering to
SDL_Window* window = NULL;
//Initialize SDL
if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
{
printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
}
else
{
//Create window
window = SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL );
if( window == NULL )
{
printf( "Window could not be created! SDL_Error: %s\n", SDL_GetError() );
}
else
{
//creating new context
SDL_GL_CreateContext(window);
//GLuint vertexArrayID;
// glGenVertexArrays(1, &vertexArrayID);
SetOpenGLAttributes();
printf("%s", "This is your version");
printf("%s\n", glGetString(GL_VERSION));
SDL_GL_SetSwapInterval(1);
glEnable(GL_DEPTH_TEST);
SDL_GL_SwapWindow(window);
bool running = true;
while(running){
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glFlush();
//drawCube(.5);
SDL_GL_SwapWindow(window);
SDL_Delay(17);
}
}
}
//Destroy window
//SDL_DestroyWindow( window );
//Quit SDL subsystems
//SDL_Quit();
return 0;
}
You are calling SetOpenGLAttributes after creating the context. Try calling it before SDL_GL_CreateContext(window);.
For anyone who has seen my previous questions: after working through the Red Book for Version 2.1, I am now moving on to Version 4.3. (Hooray, you say, since many of you have been telling me to do this for ages.)
So, I am deep into Chapter 3, but still haven't got Chapter 1's example program working.
I have two problems. (Actually three.) Firstly, it doesn't compile. Okay, so that's a problem, but it's kind of irrelevant considering the next two. Secondly, I don't exactly understand how it works or what it is trying to do, but we will get onto that.
Thirdly, it seems to me that the author of this code is a complete magician. I would suggest all sorts of tinkery-hackery are occurring here. This is most likely because of problem number 2, the fact that I don't understand what it is trying to do. The guys who wrote this book are, of course, not idiots, but bear with me; I will give an example.
Here is a section of code taken from the top of the main.cpp file. I will include the rest of the file later on, but for now:
enum VAO_IDs {
Triangles,
NumVAOs
};
If I understand correctly, this gives VAO_IDs::Triangles the value of 0 and VAO_IDs::NumVAOs the value of 1, since enums are zero-based. (I hope I am correct here, or it will be embarrassing for me.)
A short while later, you can see this line:
GLuint VAOs[NumVAOs];
This declares an array of GLuints containing one element, due to the fact that NumVAOs is equal to 1. Now, firstly, shouldn't it be VAO_IDs::NumVAOs?
And secondly, why on earth has an enum been used in this way? I would never use an enum like that, for obvious reasons: you cannot have more than one entry with the same value, the values are not explicitly specified, and so on...
Am I barking up the right tree here? It just doesn't make sense to do this... NumVAOs should have been a global, like this, surely? GLuint NumVAOs = 1; This is just abusive to the enum!
In fact, below that, the statement const GLuint NumVertices = 6; appears. This makes sense, doesn't it, because we can change the value 6 if we wanted to, but we cannot change NumVAOs to 0, for example, because Triangles is already set to 0. (Why is it in an enum? Seriously?)
Anyway, forget the enums... for now... Okay, so I made a big deal out of that, and that's the end of the problems. Any further comments I have are in the code below. You can ignore most of the GLFW stuff; it's essentially the same as GLUT.
// ----------------------------------------------------------------------------
//
// Triangles - First OpenGL 4.3 Program
//
// ----------------------------------------------------------------------------
#include <cstdlib>
#include <cstdint>
#include <cmath>
#include <stdio.h>
#include <iostream>
//#include <GL/gl.h>
//#include <GL/glu.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
/// OpenGL specific
#include "vgl.h"
#include "LoadShaders.h" // These are essentially empty files with some background work going on, nothing declared or defined which is relevant here
enum VAO_IDs {
Triangles,
NumVAOs
};
// So Triangles = 0, NumVAOs = 1
// WHY DO THIS?!
enum Buffer_IDs {
ArrayBuffer,
NumBuffers
};
enum Attrib_IDs {
vPosition = 0
}
// Please, please, please, someone explain the enum thing to me: why are they using them instead of just plain global variables?
// (Yeah an enum is a variable, okay, but you know what I mean.)
GLuint VAOs[NumVAOs]; // Compile error: expected initializer before 'VAOs'
GLuint Buffers[NumBuffers]; // NumBuffers is hidden in an enum again, just like NumVAOs
const GLuint NumVertices = 6; // Why do something different here?
// ----------------------------------------------------------------------------
//
// Init
//
// ----------------------------------------------------------------------------
void init()
{
glGenVertexArrays(NumVAOs, VAOs); // Error: VAOs was not declared in this scope
glBindVertexArray(VAOs[Triangles]);
GLfloat vertices[NumVertices][2] = {
{ -0.90, -0.90 },
{ +0.85, -0.90 },
{ -0.90, +0.85 },
{ +0.90, -0.85 },
{ +0.90, +0.90 },
{ -0.85, +0.90 }
};
glGenBuffers(NumBuffers, Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
ShaderInfo shaders[] = {
{ GL_VERTEX_SHADER, "triangles.vert" },
{ GL_FRAGMENT_SHADER, "triangles.frag" },
{ GL_NONE, nullptr }
};
GLuint program = LoadShaders(shaders);
glUseProgram(program);
glVertexAttribPointer(vPosition, 2, GL_FLOAT, GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(vPosition);
}
// ----------------------------------------------------------------------------
//
// Display
//
// ----------------------------------------------------------------------------
void display()
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAOs[Triangles]);
glDrawArrays(GL_TRIANGLES, 0, NumVertices); // Error VAOs not declared
glFlush();
}
// ----------------------------------------------------------------------------
//
// Main
//
// ----------------------------------------------------------------------------
void error_handle(int error, const char* description)
{
fputs(description, stderr);
}
void key_handle(GLFWwindow* window, int key, int scancode, int action, int mods)
{
if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
}
void handle_exit()
{
}
int main(int argc, char **argv)
{
// Setup exit function
atexit(handle_exit);
// GLFW Window Pointer
GLFWwindow* window;
// Setup error callback
glfwSetErrorCallback(error_handle);
// Init
if(!glfwInit())
{
exit(EXIT_FAILURE);
}
// Setup OpenGL
glClearColor(0.0, 0.0, 0.0, 0.0);
glEnable(GL_DEPTH_TEST);
// Set GLFW window hints
glfwWindowHint(GLFW_DEPTH_BITS, 32);
glfwWindowHint(GLFW_RED_BITS, 8);
glfwWindowHint(GLFW_GREEN_BITS, 8);
glfwWindowHint(GLFW_BLUE_BITS, 8);
glfwWindowHint(GLFW_ALPHA_BITS, 8);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, 1);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Init GLEW
if(glewInit())
{
printf("GLEW init failure!\n", stderr);
exit(EXIT_FAILURE);
}
// Init OpenGL
init();
// Create Window
window = glfwCreateWindow(800, 600, "Window Title", nullptr, nullptr);
if(!window)
{
glfwTerminate();
return EXIT_FAILURE;
}
// Make current
glfwMakeContextCurrent(window);
// Set key callback
glfwSetKeyCallback(window, key_handle);
// Check OpenGL Version
char* version;
version = (char*)glGetString(GL_VERSION);
printf("OpenGL Application Running, Version: %s\n", version);
// Enter main loop
while(!glfwWindowShouldClose(window))
{
// Event polling
glfwPollEvents();
// OpenGL Rendering
// Setup OpenGL viewport and clear screen
float ratio;
int width, height;
glfwGetFramebufferSize(window, &width, &height);
ratio = width / height;
glViewport(0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Setup projection
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, ratio, 0.1, 10.0);
// Render
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Swap Buffers
glfwSwapBuffers(window);
}
// Free glfw memory allocated for window
glfwDestroyWindow(window);
// Exit
glfwTerminate();
exit(EXIT_SUCCESS);
}
A very verbose question, I realize, but I thought it was important to explain why I think it's crazy code rather than just saying "I don't get it," as would be easy to do. Could someone please explain why these very clever people decided to do it this way, and why there are errors? (I can find nothing about this online.)
The author is using the automatic numbering property of enums to keep the NumVAOs and NumBuffers values up to date. For example, when new VAO IDs are added to the enum, the NumVAOs value will still be correct as long as it is listed last in the enum.
enum VAO_IDs {
Triangles,
Polygons,
Circles,
NumVAOs
};
Most likely your compiler does not support this trick.
I'm trying to build an OpenGL app with GLEW/GLFW. I've downloaded the binaries, placed them in the root of my folder, added the paths to the include and lib directories, and told my project to require glew32.lib, GLFW.lib, and opengl32.lib.
I even copied glew32.lib to the root directory because my project couldn't see it.
I must keep all the dependencies in the project directory since I will be distributing this. I'm at a loss.
Now when I run my program, it fails at glewInit().
This is my implementation so far:
#include "Common.h"
GameEngine::GameEngine()
{
InitWithSize(1024, 768);
InitInput();
}
void GameEngine::InitWithSize(int _width, int _height)
{
try {
// Initialise GLFW
if( !glfwInit() )
throw std::exception("Failed to initialize GLFW\n");
//glfwOpenWindowHint(GLFW_FSAA_SAMPLES, 4);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
// Open a window and create its OpenGL context
if( !glfwOpenWindow( _width, _height, 0,0,0,0, 32,0, GLFW_WINDOW ) )
throw std::exception("Failed to initialize GLFW\n");
glfwSetWindowTitle( "Tutorial 01" );
// Initialize GLEW
if (glewInit() != GLEW_OK)
throw std::exception("Failed to initialize GLEW\n");
} catch ( std::system_error const& err) {
fprintf(stdout, "System Error: %s", err.what());
glfwTerminate(); // Free glfw if it has been allocated
// Try Again
this->InitWithSize(_width, _height);
} catch( std::exception const& err) {
fprintf(stdout, "Exception Found: %s", err.what());
} catch ( ... ) {
fprintf(stdout,"Unknown Exception Occurred\n");
}
}
void GameEngine::InitInput()
{
// Ensure we can capture the escape key being pressed below
glfwEnable( GLFW_STICKY_KEYS );
}
void GameEngine::StartGameLoop()
{
do{
// Draw nothing, see you in tutorial 2 !
// Swap buffers
glfwSwapBuffers();
} // Check if the ESC key was pressed or the window was closed
while( glfwGetKey( GLFW_KEY_ESC ) != GLFW_PRESS &&
glfwGetWindowParam( GLFW_OPENED ) );
}
void GameEngine::InitTestData()
{
// An array of 3 vectors which represents 3 vertices
static const GLfloat g_vertex_buffer_data[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
0.0f, 1.0f, 0.0f,
};
}
With my Common Header:
#ifndef _COMMON_H
#define _COMMON_H
// OpenGL Libraries
#define GLEW_STATIC
//#pragma comment(lib, "glew32.lib")
#include <GL\glew.h>
#include <GL\glfw.h>
// Core Libraries
#include <Windows.h>
#include <stdio.h>
#include <stdlib.h>
#include <map>
#include <string>
#include <fstream>
// C++ 11 Libraries
#include <memory>
#include <exception>
#include <thread>
#include <chrono>
// Manager Classes
#include "ThreadManager.h"
#include "GameEngine.h"
#include "ShaderManager.h"
// Lesser Classes
#include "Shader.h"
#endif
I know this answer comes a bit late, but I don't see you making the OpenGL context current by calling glfwMakeContextCurrent(window). You have to do that before calling glewInit().
glewInit() returns 0 (GLEW_OK) when no error occurs. For some reason the return value wasn't comparing correctly against GLEW_OK, but if the value is anything other than 0, there's an error.
if(glewInit()) {
//Handle Error
}
You are using core OpenGL version 3.3, so you must specify that you are using the "new", and in GLEW terms "experimental", API. Add this line before calling glewInit():
glewExperimental=GL_TRUE;
Here is complete init code from my engine. It is for 4.2 but should work the same for you:
void Engine::InitWithGLFW(){
if(!glfwInit()){
exit(EXIT_FAILURE);
}
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 4);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
//glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT,GL_TRUE);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);
glfwOpenWindowHint(GLFW_FSAA_SAMPLES,4);
glfwDisable(GLFW_AUTO_POLL_EVENTS);
#ifdef DEBUG
glfwOpenWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GL_TRUE);
#endif
if(!glfwOpenWindow(_width,_height,8, 8, 8, 8, 24, 8,GLFW_WINDOW)){
glfwTerminate();
exit(EXIT_FAILURE);
}
glfwSetWindowTitle("XDEngine V-1.0");
InitGlew();
}
void Engine::InitGlew(){
glewExperimental=GL_TRUE;
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
fprintf(stdout, "Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
glEnable (GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_MULTISAMPLE);
glEnable(GL_DEPTH_CLAMP);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glfwSetWindowSizeCallback(Reshape);
}
I had a similar problem and finally found the solution on opengl.org.
It seems that the GLEW developers haven't fixed a core-context problem yet.
There is quite an easy solution, though:
glewExperimental = GL_TRUE;
GLenum err = glewInit();
if (err != GLEW_OK)
{
    // Problem: glewInit failed, something is seriously wrong.
    cout << "glewInit failed, aborting." << endl;
}
HTH
Compare the glewInit() return value (a GLenum) against GLEW_OK:
if (glewInit() != GLEW_OK) {
printf("glew initialization failed!");
}