Why am I always on OpenGL version 2.1? [OpenGL/SDL] - C++

I am trying to use OpenGL with SDL, and whenever I check my version at runtime, it always reports OpenGL 2.1. To my understanding, including gl3.h gives you the 3.2+ functionality of OpenGL. Beyond that, I am specifically requesting version 4.1 and still apparently running 2.1. Can somebody please tell me what I am doing wrong? I am running OS X Yosemite.
#include <iostream>
//Using SDL and standard IO
#include <SDL2/SDL.h>
#define GL_GLEXT_PROTOTYPES 1
//#include <SDL2/SDL_opengl.h>
#include <GLUT/glut.h>
#include <stdio.h>
#include <OpenGL/gl3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <string.h>
#ifdef __APPLE__
#define glGenVertexArrays glGenVertexArraysAPPLE
#define glBindVertexArray glBindVertexArrayAPPLE
#define glDeleteVertexArrays glDeleteVertexArraysAPPLE
#endif
using namespace std;
//Screen dimension constants
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
bool SetOpenGLAttributes()
{
    // Set our OpenGL version.
    // SDL_GL_CONTEXT_PROFILE_CORE gives us only the newer versions; deprecated functions are disabled
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    return true;
}
int main( int argc, char* args[] )
{
    //The window we'll be rendering to
    SDL_Window* window = NULL;
    //Initialize SDL
    if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
    {
        printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
    }
    else
    {
        //Create window
        window = SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL );
        if( window == NULL )
        {
            printf( "Window could not be created! SDL_Error: %s\n", SDL_GetError() );
        }
        else
        {
            //creating new context
            SDL_GL_CreateContext(window);
            //GLuint vertexArrayID;
            //glGenVertexArrays(1, &vertexArrayID);
            SetOpenGLAttributes();
            printf("%s", "This is your version");
            printf("%s\n", glGetString(GL_VERSION));
            SDL_GL_SetSwapInterval(1);
            glEnable(GL_DEPTH_TEST);
            SDL_GL_SwapWindow(window);
            bool running = true;
            while(running){
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
                glFlush();
                //drawCube(.5);
                SDL_GL_SwapWindow(window);
                SDL_Delay(17);
            }
        }
    }
    //Destroy window
    //SDL_DestroyWindow( window );
    //Quit SDL subsystems
    //SDL_Quit();
    return 0;
}

You are calling SetOpenGLAttributes after creating the context, but SDL only applies those attributes to contexts created after they are set. Call it before SDL_GL_CreateContext(window);.
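A minimal sketch of the corrected ordering, reusing the question's SetOpenGLAttributes and constants (illustrative, not a full program):

SDL_Init(SDL_INIT_VIDEO);
SetOpenGLAttributes(); // request the 4.1 core profile *before* any context exists
SDL_Window* window = SDL_CreateWindow("SDL Tutorial",
                                      SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                      SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL);
SDL_GLContext context = SDL_GL_CreateContext(window); // this context now honours the attributes
printf("%s\n", glGetString(GL_VERSION)); // should report 4.1 on supporting hardware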

Related

OpenGL and SDL2 glGenBuffers segmentation fault

I have a problem with OpenGL and SDL2.
When I call glGenBuffers, the application crashes.
Does anyone know where I made a mistake?
#define GLEW_STATIC
#include <iostream>
#include <SDL2/SDL.h>
#include <SDL2/SDL_image.h>
#include <GL/glew.h>
using namespace std;
SDL_Window* win;
SDL_Event event;
const Uint8* keystate = SDL_GetKeyboardState(NULL);
SDL_GLContext context;
int main(int argc, char* args[])
{
    SDL_Init(SDL_INIT_EVERYTHING);
    glewExperimental = GL_TRUE;
    glewInit();
    win = SDL_CreateWindow("opengl", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 900, 800, SDL_WINDOW_OPENGL);
    context = SDL_GL_CreateContext(win);
    SDL_GL_MakeCurrent(win, context);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);
    glClearColor( 1.0, 1.0, 1.0, 1.0 );
    float positions[6] =
    {
        -0.5f, -0.5f,
         0.0f,  0.5f,
         0.5f,  0.0f
    };
    GLuint buffer;
    glGenBuffers(1, &buffer); //here
    glBindBuffer(GL_ARRAY_BUFFER, buffer);
    glBufferData(GL_ARRAY_BUFFER, 6, positions, GL_STATIC_DRAW);
    while(true)
    {
        while(SDL_PollEvent(&event))
        {
            if(event.type == SDL_QUIT) return 0;
        }
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawArrays(GL_TRIANGLES, 0, 3);
        SDL_GL_SwapWindow(win);
    }
}
I really don't know where the problem is; I would be grateful if someone could help me.
The GLEW library has to be initialized, by glewInit, after the OpenGL context has been made current by SDL_GL_MakeCurrent(win, context). See Initializing GLEW.
win = SDL_CreateWindow("opengl",SDL_WINDOWPOS_CENTERED,SDL_WINDOWPOS_CENTERED,
900,800,SDL_WINDOW_OPENGL);
context = SDL_GL_CreateContext(win);
SDL_GL_MakeCurrent(win,context);
if (glewInit() != GLEW_OK)
return 0;
When you call glewInit before that, the initialization of GLEW fails and it doesn't return GLEW_OK; the OpenGL function pointers (such as glGenBuffers) are then never loaded, which is why the call crashes.
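As an aside, unrelated to the crash: glBufferData takes its size in bytes, so passing 6 uploads only six of the array's 24 bytes. Using sizeof gives the intended size:

glBufferData(GL_ARRAY_BUFFER, sizeof(positions), positions, GL_STATIC_DRAW); // sizeof(positions) == 6 * sizeof(float) == 24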

How to avoid high GPU usage when calling SDL_GL_SwapWindow when using an OpenGL context?

I'm seeing strange behaviour where the GPU usage gets very high, while at other times it stays below 2%.
Right now I'm using an OpenGL context to make draw calls; the code I'm using to reproduce this bug is right here:
#include <GL/glew.h>
#include <SDL2/SDL.h>
#include <string>

static const int DEFAULT_WINDOW_WIDTH = 1280;
static const int DEFAULT_WINDOW_HEIGHT = 720;

int main() {
    SDL_Init(SDL_INIT_VIDEO);
#ifdef __APPLE__
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS,
                        SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
#endif
    SDL_SetHint(SDL_HINT_RENDER_DRIVER, "opengl");
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
    SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_Window *window =
        SDL_CreateWindow(nullptr, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                         DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT,
                         SDL_WINDOW_OPENGL | SDL_WINDOW_ALLOW_HIGHDPI);
    SDL_GLContext glcontext = SDL_GL_CreateContext(window);
    glewExperimental = GL_TRUE;
    glewInit();
    SDL_GL_MakeCurrent(window, glcontext);
    SDL_GL_SetSwapInterval(1);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    SDL_Event event;
    bool quit = false;
    while (!quit) {
        while (SDL_PollEvent(&event)) {
            quit = SDL_QUIT == event.type;
        }
        SDL_GL_SwapWindow(window);
    }
    SDL_GL_DeleteContext(glcontext);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
I isolated this down to just the SDL_GL_SwapWindow call; if you look at the screenshot, you can see it is taking 26% of GPU usage on its own. This is random: if I wait a couple of hours and come back, the GPU usage will be around 2%.
I closed all other applications and processes to check that this is not happening because other apps are open.
I tried switching GLEW and SDL for GLAD and GLFW and I still get the same result. All of this is on macOS Catalina 10.15.7.
Is this normal?

GLFW fails to initialize on Ubuntu?

Here is the code which I'm trying to run:
// Include standard headers
#include <stdio.h>
#include <stdlib.h>

// Include GLEW
#include <GL/glew.h>

// Include GLFW
#include <GLFW/glfw3.h>
GLFWwindow* window;

// Include GLM
#include <glm/glm.hpp>
using namespace glm;

int main( void )
{
    // Initialise GLFW
    if( !glfwInit() )
    {
        fprintf( stderr, "Failed to initialize GLFW\n" );
        getchar();
        return -1;
    }
    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    // Open a window and create its OpenGL context
    window = glfwCreateWindow( 1024, 768, "Tutorial 01", NULL, NULL);
    if( window == NULL ){
        fprintf( stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n" );
        getchar();
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    // Initialize GLEW
    if (glewInit() != GLEW_OK) {
        fprintf(stderr, "Failed to initialize GLEW\n");
        getchar();
        glfwTerminate();
        return -1;
    }
    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
    // Dark blue background
    glClearColor(0.0f, 0.0f, 0.4f, 0.0f);
    do{
        // Clear the screen. It's not mentioned before Tutorial 02, but it can cause flickering, so it's there nonetheless.
        glClear( GL_COLOR_BUFFER_BIT );
        // Draw nothing, see you in tutorial 2 !
        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } // Check if the ESC key was pressed or the window was closed
    while( glfwGetKey(window, GLFW_KEY_ESCAPE ) != GLFW_PRESS &&
           glfwWindowShouldClose(window) == 0 );
    // Close OpenGL window and terminate GLFW
    glfwTerminate();
    return 0;
}
It only shows a black window with the text:
Failed to initialize GLFW
Also, I've already linked GL, GLEW, SDL2 and glfw3 in the build options.

OpenGL cannot draw with glVertexAttrib in core profile

I want to use OpenGL 3.1.
I'm using a MacBook Pro with two graphics cards: an NVIDIA GeForce GT 650M (1024 MB) and an Intel HD Graphics 4000 (1536 MB). Both support up to OpenGL 4.1.
Previously I was able to draw a triangle; however, my program was using version 2.1. Therefore I added SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);. However, now the triangle is no longer drawn.
#include <string>
#include <iostream>
#include <SDL2/SDL.h>

#define GL3_PROTOTYPES 1
#include "../include/GL3/gl3.h"

int main(int argc, const char *argv[]) {
    // Initialize the SDL
    if(SDL_Init(SDL_INIT_VIDEO) < 0) {
        std::cout << "Failed to initialize the SDL: " << SDL_GetError() << std::endl;
        SDL_Quit();
        return -1;
    }
    // Configure the SDL to use OpenGL 3.1
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 1);
    // ======= HERE =======
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    // ====================
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
    SDL_Window* window = SDL_CreateWindow("Triangle Test", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 800, 600, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
    if (window == 0) {
        std::cout << "Error when creating the window: " << SDL_GetError() << std::endl;
        SDL_Quit();
        return -1;
    }
    // Create the OpenGL context
    SDL_GLContext contextOpenGL = SDL_GL_CreateContext(window);
    // Initialization may fail
    if (contextOpenGL == 0) {
        std::cout << SDL_GetError() << std::endl;
        SDL_DestroyWindow(window);
        SDL_Quit();
        return -1;
    }
    SDL_Event events;
    bool end = false;
    // Define the vertices of our triangle
    static const GLfloat vertices[] = { 0.0,  1.0,   // upper point
                                       -0.5,  0.0,   // left point
                                        0.5,  0.0};  // right point
    const int TRIANGLE_IDX = 0;
    while(!end) {
        SDL_WaitEvent(&events);
        if(events.window.event == SDL_WINDOWEVENT_CLOSE) {
            end = true;
        }
        // Clear the screen
        glClear(GL_COLOR_BUFFER_BIT);
        // Send vertices to OpenGL
        glVertexAttribPointer(TRIANGLE_IDX, 2, GL_FLOAT, GL_FALSE, 0, vertices);
        // Activate our vertex array
        glEnableVertexAttribArray(TRIANGLE_IDX);
        // Draw the points passed previously
        glDrawArrays(GL_TRIANGLES, 0, 3);
        glDisableVertexAttribArray(TRIANGLE_IDX);
        // Refresh the screen
        SDL_GL_SwapWindow(window);
    }
    return 0;
}
I first tried using glGenBuffers, glBindBuffer and glBufferData, but I could not manage to make it work.
The Fixed Function Pipeline has been removed from core OpenGL 3.1 and above; the core profile also rejects client-side vertex arrays, so the pointer you pass to glVertexAttribPointer must come from a buffer object, with a vertex array object bound.
You will have to use shaders instead. This site has a nice example of how to use them.
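A minimal sketch of what that means for the question's code (GLSL 1.50, matching the 3.2+ core contexts macOS actually provides; compile/link error checking omitted, and the attribute name "position" is invented for this example). It goes after context creation, before the event loop:

// Shaders are mandatory in the core profile.
const char* vsSrc =
    "#version 150 core\n"
    "in vec2 position;\n"
    "void main() { gl_Position = vec4(position, 0.0, 1.0); }\n";
const char* fsSrc =
    "#version 150 core\n"
    "out vec4 fragColor;\n"
    "void main() { fragColor = vec4(1.0); }\n";
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vsSrc, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fsSrc, NULL);
glCompileShader(fs);
GLuint program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glUseProgram(program);
// The vertex data has to live in a buffer object, with a VAO bound.
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
GLint posLoc = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
// The loop can then keep its glDrawArrays(GL_TRIANGLES, 0, 3) call unchanged.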

Why isn't OpenGL drawing anything?

This code is supposed to draw a triangle, yet all I get is a black screen. Why is nothing being drawn?
Secondly, in my array of vertices, if I put an 'f' after my coordinates like I always see in tutorials, I get an error about an invalid digit in an octal constant. Why can everyone else use 'f' after their numbers and not me?
I am using OpenGL 4.1 on OS X Yosemite.
#include <iostream>
//Using SDL and standard IO
#include <SDL2/SDL.h>
//#define GL_GLEXT_PROTOTYPES 1
//#include <SDL2/SDL_opengl.h>
#include <GLUT/glut.h>
#include <stdio.h>
#include <OpenGL/gl3.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <string.h>

using namespace std;

//Screen dimension constants
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;

bool SetOpenGLAttributes()
{
    // Set our OpenGL version.
    // SDL_GL_CONTEXT_PROFILE_CORE gives us only the newer versions; deprecated functions are disabled
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    return true;
}

int main( int argc, char* args[] )
{
    //The window we'll be rendering to
    SDL_Window* window = NULL;
    //Initialize SDL
    if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
    {
        printf( "SDL could not initialize! SDL_Error: %s\n", SDL_GetError() );
    }
    else
    {
        SetOpenGLAttributes();
        //Create window
        window = SDL_CreateWindow( "SDL Tutorial", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_OPENGL );
        if( window == NULL )
        {
            printf( "Window could not be created! SDL_Error: %s\n", SDL_GetError() );
        }
        else
        {
            //creating new context
            SDL_GL_CreateContext(window);
            GLuint vertexArrayID;
            glGenVertexArrays(1, &vertexArrayID);
            glBindVertexArray(vertexArrayID);
            printf("%s", "This is your version");
            printf("%s\n", glGetString(GL_VERSION));
            printf("%s", glGetString(GL_RENDERER));
            SDL_GL_SetSwapInterval(1);
            glEnable(GL_DEPTH_TEST);
            float r = 0.5;
            static const GLfloat cubeV[] = {
                -.5, 0, 0,
                 .5, 0, 0,
                 0, .5, 0
            };
            GLuint vertexBuffer;
            glGenBuffers(1, &vertexBuffer);
            glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
            glBufferData(GL_ARRAY_BUFFER, sizeof(cubeV), cubeV, GL_STATIC_DRAW);
            SDL_GL_SwapWindow(window);
            bool running = true;
            while(running){
                glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
                glEnableVertexAttribArray(0);
                glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
                glVertexAttribPointer(
                    0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,        // size
                    GL_FLOAT, // type
                    GL_FALSE, // normalized?
                    0,        // stride
                    (void*)0  // array buffer offset
                );
                glDrawArrays(GL_TRIANGLES, 0, 3);
                glDisableVertexAttribArray(0);
                glFlush();
                SDL_GL_SwapWindow(window);
                SDL_Delay(17);
            }
        }
    }
    //Destroy window
    //SDL_DestroyWindow( window );
    //Quit SDL subsystems
    //SDL_Quit();
    return 0;
}