I am writing a C++ application which works with OpenGL on Mac OS X.
I have tried GLFW and FreeGLUT for window management.
Both GLFW and FreeGLUT have been installed with brew.
There is something I do not understand.
Here is my C++ code for FreeGLUT:
int main(int argc, char* argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
    glutInitContextVersion(3, 3);
    glutInitContextFlags(GLUT_CORE_PROFILE | GLUT_DEBUG);
    glutInitWindowSize(WIDTH, HEIGHT);
    glutCreateWindow("Test");

    glewExperimental = GL_TRUE;
    GLenum err = glewInit();
    if (GLEW_OK != err)
    {
        return -1;
    }

    cout << "GL_SHADING_LANGUAGE_VERSION: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << endl;
...
Here is the output:
GL_SHADING_LANGUAGE_VERSION: 1.20
And here is my C++ code with GLFW:
int main(int argc, const char * argv[])
{
    if (!glfwInit())
    {
        return -1;
    }

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(640, 480, "Test", NULL, NULL);
    if (window == NULL) {
        return -1;
    }
    glfwMakeContextCurrent(window);

    glewExperimental = true;
    if (glewInit() != GLEW_OK) {
        return -1;
    }

    std::cout << "GL_SHADING_LANGUAGE_VERSION: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
Here is the output:
GL_SHADING_LANGUAGE_VERSION: 4.10
My question is: why is the GLSL version not the same?
Thanks
The GLUT initialization is wrong. GLUT_CORE_PROFILE is not a valid parameter for glutInitContextFlags; the profile is selected with glutInitContextProfile instead. The correct code should look like this:
glutInitContextVersion(3, 3);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutInitContextFlags(GLUT_DEBUG);
Also note that you are not requesting the same profile in both examples: the GLUT example asks for 3.3 Core with Debug, while the GLFW example asks for 3.3 Core with Forward Compatibility.
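Putting the fix together, a minimal complete sketch of the corrected FreeGLUT initialization could look like this (assuming freeglut 2.6 or later, which provides glutInitContextProfile, plus GLEW; the window size and title are placeholders, not taken from the question):

#include <iostream>
#include <GL/glew.h>
#include <GL/freeglut.h>

int main(int argc, char* argv[])
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
    glutInitContextVersion(3, 3);
    glutInitContextProfile(GLUT_CORE_PROFILE); // the profile goes here, not in the flags
    glutInitContextFlags(GLUT_DEBUG);
    glutInitWindowSize(640, 480);
    glutCreateWindow("Test");

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK)
        return -1;

    std::cout << "GL_SHADING_LANGUAGE_VERSION: "
              << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
    glutMainLoop();
    return 0;
}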
#include "list.h"
int main()
{
//INIT GLFW
glfwInit();
//CREATE WINDOW
const int WINDOW_WIDTH = 640;
const int WINDOW_HEIGHT = 480;
int framebufferWidth = 0;
int framebufferHight = 0;
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 4);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH,WINDOW_HEIGHT,"Title", NULL, NULL);
glfwGetFramebufferSize(window, &framebufferWidth, &framebufferHight);
glViewport(0, 0, framebufferWidth, framebufferHight);
glfwMakeContextCurrent(window);//IMPORTIANT!!
//INIT GLEW (NEEDS WINDOW AND OPENGL CONTEXT)
glewExperimental = GL_TRUE;
>//Error
if (glewInit() != GLEW_OK)
{
std::cout << "ERROR::MAIN.CPP::GLEW_INIT_FAILED" << "\n";
glfwTerminate();
}
//MAIN LOOP
while (glfwWindowShouldClose(window))
{
//UPDATE INPUT ---
//UPDATE ---
//DRAW ---
//Clear
//Draw
//End Draw
}
//END OF PROGAM
glfwTerminate();
return 0;
}
glViewport(0, 0, framebufferWidth, framebufferHight); is giving me

Unhandled exception at 0x00007FF704D6E7D9 in OpenzGL4.exe: 0xC0000005: Access violation reading location 0x0000000000000348.

when I run it.
Every OpenGL instruction requires a valid and current OpenGL context. Hence glfwMakeContextCurrent has to be invoked before any OpenGL instruction:
GLFWwindow* window = glfwCreateWindow(WINDOW_WIDTH,WINDOW_HEIGHT,"Title", NULL, NULL);
glfwMakeContextCurrent(window); // <----- ADD
glfwGetFramebufferSize(window, &framebufferWidth, &framebufferHight);
glViewport(0, 0, framebufferWidth, framebufferHight);
glfwMakeContextCurrent(window); // <----- DELETE
In addition to what Rabbid76 already wrote in his answer, there is another problem in your code:
glViewport(0, 0, framebufferWidth, framebufferHight);

glfwMakeContextCurrent(window); //IMPORTANT!!

//INIT GLEW (NEEDS WINDOW AND OPENGL CONTEXT)
glewExperimental = GL_TRUE;
//Error
if (glewInit() != GLEW_OK)
{
    std::cout << "ERROR::MAIN.CPP::GLEW_INIT_FAILED" << "\n";
    glfwTerminate();
}
Since you use the GLEW OpenGL loader, every gl...() function name is actually remapped as a preprocessor macro to a function pointer, and glewInit() queries all those function pointers (which requires an active OpenGL context, so it can't be done before glfwMakeContextCurrent). So it is not enough to move the glViewport call after glfwMakeContextCurrent; you must also move it after glewInit.
And there is a second issue with this code: glewExperimental = GL_TRUE is an evil hack for a bug in GLEW 1.x with OpenGL core profiles, and its use can't be discouraged enough. Just update to GLEW 2.x or another loader which is compatible with OpenGL core profile contexts.
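Putting both answers together, a minimal sketch of the corrected initialization order could look like this (assuming GLEW 2.x and GLFW 3; hint values are taken from the question, while the window size and title are placeholders):

#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>

int main()
{
    if (!glfwInit())
        return -1;

    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 4);
    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);

    GLFWwindow* window = glfwCreateWindow(640, 480, "Title", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        return -1;
    }

    glfwMakeContextCurrent(window);   // 1. make the context current...

    if (glewInit() != GLEW_OK)        // 2. ...then load the GL function pointers...
    {
        glfwTerminate();
        return -1;
    }

    int framebufferWidth = 0, framebufferHeight = 0;
    glfwGetFramebufferSize(window, &framebufferWidth, &framebufferHeight);
    glViewport(0, 0, framebufferWidth, framebufferHeight); // 3. ...then call OpenGL

    while (!glfwWindowShouldClose(window))
    {
        glfwPollEvents();
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glfwSwapBuffers(window);
    }

    glfwTerminate();
    return 0;
}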
After running several tests on the code, I have determined that both GLFW and GLEW are initialised successfully, yet when I try to create a GLFWwindow* object to be used with GLFW functions, the glfwCreateWindow() function returns a nullptr. Why is this, and how do I fix it? Here is my code:
#include <iostream>

#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>

const GLuint windowWidth = 500, windowHeight = 500;

int main()
{
    glfwInit();

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);

    GLFWwindow* window = glfwCreateWindow(windowWidth, windowHeight, "Learn OpenGL", nullptr, nullptr);
    if (window == nullptr) {
        std::cout << "Failed to create GLFW window!" << std::endl;
        char myvar1; std::cin >> myvar1;
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK) {
        std::cout << "Failed to initialize GLEW" << std::endl;
        char myvar2; std::cin >> myvar2;
        return -1;
    }

    glViewport(0, 0, windowWidth, windowHeight);

    while (!glfwWindowShouldClose(window)) {
        glfwPollEvents();
        glfwSwapBuffers(window);
    }

    glfwTerminate();
    return 0;
}
This is probably because you are specifying version 3.3 for the context creation and your OpenGL version is lower than 3.3. The GLFW documentation says:
OpenGL: GLFW_CONTEXT_VERSION_MAJOR and GLFW_CONTEXT_VERSION_MINOR are not hard constraints, but creation will fail if the OpenGL version of the created context is less than the one requested.
This might happen if you are using a laptop that has two GPUs. Laptops do that for power-consumption reasons: most applications run on the standard GPU, while games, for example, use the high-performance one.
For example, my laptop has a built-in Intel(R) HD Graphics 3000 GPU (OpenGL 3.1) and an NVIDIA GeForce GT 630M GPU (OpenGL 4.4).
You can see if your laptop has this functionality if you right-click on an application shortcut and have the option "run with graphics processor": "High-performance (NVIDIA) processor" or "Integrated graphics (default)".
The problem is that the editor (Eclipse, MS Visual Studio, etc., in which you run your code) will use the default GPU, which usually has a much lower OpenGL version than the other one.
You can fix this by always running your editor with your high-performance GPU.
If you're not using a laptop, or only have one GPU, then try updating your drivers.
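As an aside, on Windows laptops with switchable graphics the application itself can request the high-performance GPU by exporting vendor-documented symbols; a minimal sketch (NvOptimusEnablement and AmdPowerXpressRequestHighPerformance are documented by NVIDIA and AMD respectively; the exports are simply ignored on systems without those drivers, and this is not part of OpenGL itself):

// NvOptimusEnablement is declared as DWORD (a 32-bit unsigned integer) in
// NVIDIA's Optimus rendering-policies documentation. Exported with C linkage
// so the driver can find the symbols by name when the process starts.
extern "C"
{
    __declspec(dllexport) unsigned long NvOptimusEnablement = 0x00000001;
    __declspec(dllexport) int AmdPowerXpressRequestHighPerformance = 1;
}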
Is it possible to switch from "VBO mode" to immediate mode in OpenGL and back?
When debugging, I find it easier to just use glBegin(...) than to set up VAOs, VBOs, etc.
In my init() function, there's this:
if (!(GLEW_ARB_vertex_shader && GLEW_ARB_fragment_shader))
{
    Log::instance() << "glsl not ready.\n";
    return false;
}
Does it initiate the shader state, and does that mean I can't go back to the fixed-function pipeline?
Edit:
My initialization:
int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_MULTISAMPLE);
    glutInitWindowSize(1600, 900);
    glutInitWindowPosition(200, 50);
    glutCreateWindow("OpenGL4");
    glutIgnoreKeyRepeat(0);
    glutKeyboardUpFunc(keyboardUp);
    glutSpecialFunc(specialKeyboard);
    glutSpecialUpFunc(specialKeyboardUp);
    glutSetCursor(GLUT_CURSOR_NONE);
    ...
    GLenum result = glewInit();
    if (result != GLEW_OK) {
        Log::instance() << "glewInit() error.\n";
        return false;
    }
    if (!(GLEW_ARB_vertex_shader && GLEW_ARB_fragment_shader))
    {
        Log::instance() << "glsl not ready.\n";
        return false;
    }

    //opengl stuff
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    glClearColor(0.15f, 0.15f, 0.15f, 1.0f);
}
I am using a simple C++ program on Mac OS X 10.9, with OpenCV 2.4.8 compiled with Qt support and OpenGL support. Here is the relevant portion of my CMake config for building OpenCV 2.4.8:
GUI:
-- QT 4.x: YES (ver 4.8.6 EDITION = OpenSource)
-- QT OpenGL support: YES (/usr/local/Cellar/qt/4.8.5/lib/QtOpenGL.framework)
-- OpenGL support: YES (/System/Library/Frameworks/AGL.framework /System/Library/Frameworks/OpenGL.framework)
Here is a simple GUI program:
#include <opencv2/opencv.hpp>
#include <OpenGL/gl.h>

using namespace cv;

int main(int argc, char *argv[])
{
    namedWindow("my-win", WINDOW_OPENGL);
    int desiredWindowWidth = 800, desiredWindowHeight = 600;
    resizeWindow("my-win", desiredWindowWidth, desiredWindowHeight);

    cv::VideoCapture capture(0);
    for (;;) {
        Mat view;
        if (capture.isOpened())
        {
            capture >> view;
        }
        imshow("my-win", view);
        char key = (char)waitKey(30);
        if (key == 's') {
            unsigned char* pixels = new unsigned char[3 * desiredWindowWidth * desiredWindowHeight];
            glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
            glReadPixels(0, 0, desiredWindowWidth, desiredWindowHeight, GL_RGB, GL_UNSIGNED_BYTE, pixels);
            Mat savedImage(desiredWindowHeight, desiredWindowWidth, CV_8UC3, pixels);
            cv::flip(savedImage, savedImage, 0);
            imwrite("./foo.jpg", savedImage);
        }
    }
}
Result: I get a window with the current video camera image as output. So far so good.
But the window that I get is small compared to what I asked for (800 x 600), possibly a 400 x 300 window. Further, if I give any other proportional window size parameters (e.g. 1200 x 900), I still get the same sized window. It looks like resizeWindow is not functioning properly.
There is no way to check the size of the window created. What I did was save the window content as an image: I get a black 800 x 600 image with the window content appearing in one corner as a 400 x 300 part.
It turns out that I was indeed using OpenGL 2.1, as demonstrated by the following glxinfo output:
glxinfo | grep OpenGL
OpenGL vendor string: Intel Inc.
OpenGL renderer string: Intel Iris Pro OpenGL Engine
OpenGL version string: 2.1 INTEL-8.20.26
OpenGL shading language version string: 1.20
OpenGL extensions:
I rewrote my application by including GLEW and GLFW3 and forced a 3.2 version of OpenGL.
Now I am able to control the size of the window.
#include <iostream>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <opencv2/opencv.hpp>

using namespace std;
using namespace cv;

int main(int argc, char **argv) {
    GLFWwindow* window;
    const GLubyte* strGLVersion;

    if (!glfwInit())
        exit(EXIT_FAILURE);

    //ask for OpenGL 3.2
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    window = glfwCreateWindow(800, 600, "check-opengl", NULL, NULL);
    if (!window) {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwMakeContextCurrent(window);

    //more accommodating glew
    glewExperimental = true;
    if (glewInit() != GLEW_OK) {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }

    //v-sync off
    glfwSwapInterval(0);

    //print version
    strGLVersion = glGetString(GL_VERSION);
    cout << "GL_VERSION: " << strGLVersion << endl;
    const GLubyte* strGLShadingLanguageVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
    cout << "GL_SHADING_LANGUAGE_VERSION: " << strGLShadingLanguageVersion << endl;

    VideoCapture cap(0);
    if (!cap.isOpened()) // check if we succeeded
        return -1;

    Mat image;
    cap >> image;
    const string win_name("kgeorge-ar");

    while (!glfwWindowShouldClose(window)) {
        cap >> image;
        //-----------8<------ snip -------8<------------
        //code for converting image to opengl texture
        //and rendering it to a quad is omitted
        //-----------8<------ snip -------8<------------
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwTerminate();
    exit(EXIT_SUCCESS);
    return 0;
}
I am trying to get an OpenGL context above version 2 on a Mac using GLFW. My configuration is Mavericks (10.9.1) + Xcode, and I have an NVIDIA GeForce 650M GPU with OpenGL 4.1 Full Profile potentially supported. I use the following code:
static void test_error_cb (int error, const char *description)
{
    fprintf(stderr, "%d: %s\n", error, description);
}

int main(void)
{
    GLFWwindow* window;

    glfwSetErrorCallback(test_error_cb);

    // Initialise GLFW
    if (!glfwInit())
    {
        fprintf(stderr, "Failed to initialize GLFW\n");
        exit(EXIT_FAILURE);
    }

    //Request Specific Version
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    // Open OpenGL window
    window = glfwCreateWindow(500, 500, "Split view demo", NULL, NULL);
    if (!window)
    {
        fprintf(stderr, "Failed to open GLFW window\n");
        glfwTerminate();
        exit(EXIT_FAILURE);
    }

    // Set callback functions
    glfwSetFramebufferSizeCallback(window, framebufferSizeFun);
    glfwSetWindowRefreshCallback(window, windowRefreshFun);
    glfwSetCursorPosCallback(window, cursorPosFun);
    glfwSetMouseButtonCallback(window, mouseButtonFun);
    glfwSetKeyCallback(window, key_callback);

    // Enable vsync
    glfwMakeContextCurrent(window);
    glfwSwapInterval(1);

    glfwGetFramebufferSize(window, &width, &height);
    framebufferSizeFun(window, width, height);

    //Check Version
    int major, minor, rev;
    major = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MAJOR);
    minor = glfwGetWindowAttrib(window, GLFW_CONTEXT_VERSION_MINOR);
    rev = glfwGetWindowAttrib(window, GLFW_CONTEXT_REVISION);
    printf("OpenGL version received: %d.%d.%d\n", major, minor, rev);
    printf("Supported OpenGL is %s\n", (const char*)glGetString(GL_VERSION));
    printf("Supported GLSL is %s\n", (const char*)glGetString(GL_SHADING_LANGUAGE_VERSION));

    // Main loop
    for (;;)
    {
        // Only redraw if we need to
        if (do_redraw)
        {
            // Draw all views
            drawAllViews();

            // Swap buffers
            glfwSwapBuffers(window);

            do_redraw = 0;
        }

        // Wait for new events
        glfwWaitEvents();

        // Check if the window should be closed
        if (glfwWindowShouldClose(window))
            break;
    }

    // Close OpenGL window and terminate GLFW
    glfwTerminate();
    exit(EXIT_SUCCESS);
}
Currently the glfwCreateWindow call fails. Without any hints (i.e. no glfwWindowHint calls) I can only get OpenGL 2.1 with a GLSL version of 1.20. Please advise.
A Core context is required to access GL versions greater than 2.1 on OS X.
Uncomment your GLFW_OPENGL_PROFILE hint.
Try adding this before your #include <GLFW/glfw3.h> and uncomment your profile hint.
#define GLFW_INCLUDE_GLCOREARB
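Putting the hints together, a minimal sketch that yields a 3.2 core context on OS X could look like this (all four hints are needed there to get past 2.1; the window size and title are placeholders):

#define GLFW_INCLUDE_GLCOREARB  // pull in the modern OpenGL header on OS X
#include <GLFW/glfw3.h>
#include <cstdio>
#include <cstdlib>

int main(void)
{
    if (!glfwInit())
        exit(EXIT_FAILURE);

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(500, 500, "core context", NULL, NULL);
    if (!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwMakeContextCurrent(window);

    printf("GL_VERSION: %s\n", (const char*)glGetString(GL_VERSION));

    glfwDestroyWindow(window);
    glfwTerminate();
    return EXIT_SUCCESS;
}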
I'm not sure why you're disabling the core profile hint. I don't have an answer, but you might get some more information via the error callback, e.g.:
extern "C"
{
static void test_error_cb (int error, const char *description)
{
fprintf(stderr, "%d: %s\n", error, description);
}
}
...
{
    glfwSetErrorCallback(test_error_cb);
    if (glfwInit() != GL_TRUE)
    {
        ....
glfwSetErrorCallback is a function that can be used prior to glfwInit.