Using SDL and OpenGL drawing primitives does not work

I tried to implement some basic things like a game loop and drawing with OpenGL by following a tutorial. The tutorial uses SDL 1.2, but I am using SDL 2.0, so I had to replace some old functions with their newer versions.
Unfortunately, the lines I want to draw don't show up.
I already tried adjusting the coordinates so that the line can't be outside the window, but it did not work.
#include "SDL.h"
#include "SDL_opengl.h"
#include <iostream>
int main(int argc, char* args[])
{
//initialize SDL
SDL_Init(SDL_INIT_EVERYTHING);
//Set openGL memory usage
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_Window *window;
window=SDL_CreateWindow("MyWindow",
SDL_WINDOWPOS_CENTERED,
SDL_WINDOWPOS_CENTERED,
600,400,
SDL_WINDOW_OPENGL);
SDL_Renderer *renderer;
renderer = SDL_CreateRenderer(window,-1,SDL_RENDERER_ACCELERATED);
SDL_SetRenderDrawColor(renderer,255,255,255,255);//RGB/ALPHA
glShadeModel(GL_SMOOTH);
//2D rendering
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
//Disable depth checking
glDisable(GL_DEPTH_TEST);
std::cout<<"OpenGL is running\n";
std::cout<<"Main loop has started\n";
//Handle GameLoop
bool isRunning=true;
//Handel events
SDL_Event event;
//main gameloop
while(isRunning)
{
//Events
while(SDL_PollEvent(&event))
{
if(event.type==SDL_QUIT)
isRunning=false;
//if esc button is released
if(event.type==SDL_KEYUP&&event.key.keysym.sym==SDLK_ESCAPE)
isRunning=false;
if(event.type==SDL_KEYUP&&event.key.keysym.sym==SDLK_r)
SDL_SetRenderDrawColor(renderer,255,0,0,255);
//Logic for certain events
}
//Logic
//Rendering
SDL_RenderClear(renderer);
SDL_RenderPresent(renderer);
glPushMatrix();
glOrtho(0,600,400,0,-1,1); //Set matrix
glBegin(GL_LINES);//start drawing
glColor4ub(255,0,0,255);
glVertex2i(0,0);
glVertex2i(600,400);
glEnd();// end drawing
glPopMatrix();
SDL_GL_SwapWindow(window);
}
SDL_Quit();
return 0;
}

Don't mix SDL_Renderer code and OpenGL. There's no provision (yet, and maybe ever) in SDL2 for resetting the Renderer's GL state (if it's using the GL backend) once you trample it with raw GL code.

First, don't mix OpenGL and the SDL_Renderer stuff. Then you actually need to create an OpenGL context for your window before rendering can work. Use this after your SDL_CreateWindow call:
SDL_GLContext glcontext = SDL_GL_CreateContext(window);
At the end you need to free the context:
SDL_GL_DeleteContext(glcontext);
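Putting both answers together, a minimal sketch of the corrected program could look like this: pure OpenGL, no SDL_Renderer, and the context created before any GL call. This is a sketch under those assumptions, not a drop-in rewrite of the tutorial code:

#include "SDL.h"
#include "SDL_opengl.h"

int main(int argc, char* args[])
{
    SDL_Init(SDL_INIT_VIDEO);
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    SDL_Window *window = SDL_CreateWindow("MyWindow",
                                          SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                                          600, 400, SDL_WINDOW_OPENGL);

    // Create the GL context BEFORE issuing any GL call
    SDL_GLContext glcontext = SDL_GL_CreateContext(window);

    // One-time GL setup: a 2D projection matching the window
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, 600, 400, 0, -1, 1);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glDisable(GL_DEPTH_TEST);

    bool isRunning = true;
    SDL_Event event;
    while (isRunning)
    {
        while (SDL_PollEvent(&event))
            if (event.type == SDL_QUIT)
                isRunning = false;

        glClear(GL_COLOR_BUFFER_BIT); // clear with GL, not SDL_RenderClear

        glBegin(GL_LINES);
        glColor4ub(255, 0, 0, 255);
        glVertex2i(0, 0);
        glVertex2i(600, 400);
        glEnd();

        SDL_GL_SwapWindow(window);
    }

    SDL_GL_DeleteContext(glcontext);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}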

Related

How to avoid high GPU usage when calling SDL_GL_SwapWindow when using an OpenGL context?

I'm getting strange behaviour where the GPU usage gets very high, while at other times it just stays below 2%.
Right now I'm using an OpenGL context to make draw calls; you can see the code I'm using to reproduce this bug right here:
#include <GL/glew.h>
#include <SDL2/SDL.h>
#include <string>

static const int DEFAULT_WINDOW_WIDTH = 1280;
static const int DEFAULT_WINDOW_HEIGHT = 720;

int main() {
  SDL_Init(SDL_INIT_VIDEO);
#ifdef __APPLE__
  SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS,
                      SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG);
  SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
  SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
  SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
#endif
  SDL_SetHint(SDL_HINT_RENDER_DRIVER, "opengl");
  SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
  SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
  SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
  SDL_Window *window =
      SDL_CreateWindow(nullptr, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                       DEFAULT_WINDOW_WIDTH, DEFAULT_WINDOW_HEIGHT,
                       SDL_WINDOW_OPENGL | SDL_WINDOW_ALLOW_HIGHDPI);
  SDL_GLContext glcontext = SDL_GL_CreateContext(window);
  glewExperimental = GL_TRUE;
  glewInit();
  SDL_GL_MakeCurrent(window, glcontext);
  SDL_GL_SetSwapInterval(1);
  glEnable(GL_BLEND);
  glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
  SDL_Event event;
  bool quit = false;
  while (!quit) {
    while (SDL_PollEvent(&event)) {
      quit = SDL_QUIT == event.type;
    }
    SDL_GL_SwapWindow(window);
  }
  SDL_GL_DeleteContext(glcontext);
  SDL_DestroyWindow(window);
  SDL_Quit();
  return 0;
}
I isolated this down to just the SDL_GL_SwapWindow call; if you look at the screenshot, you can see it's taking 26% of GPU usage on its own. This is random: if I wait a couple of hours and come back, the GPU usage will be around 2%.
I closed all other applications and processes to check that this is not happening because other apps are open.
I tried switching GLEW and SDL for GLAD and GLFW and I still get the same result. All of this is on macOS Catalina 10.15.7.
Is this normal?

OpenGL renderer with ImGui and SDL2

I'm trying to use all three libraries together, but I'm quite confused by the sample code and I can't quite follow the documentation. This is the code, and I'll explain my confusions below:
#include <iostream>
#include <string>
#include <SDL2/SDL.h>
#include <GL/glew.h>
#include <imgui/imgui.h>
#include <imgui/imgui_stdlib.h>
#include <imgui/imgui_impl_sdl.h>
#include <imgui/imgui_impl_opengl3.h>

// Main code
int main(int argc, char* argv[])
{
    if (SDL_Init(SDL_INIT_VIDEO) != 0)
    {
        std::cout << SDL_GetError() << std::endl;
        return -1;
    }

    // GL 3.0 + GLSL 130
    const char* glsl_version = "#version 130";
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, 0);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);

    // Create window with graphics context
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
    SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
    SDL_WindowFlags window_flags = (SDL_WindowFlags)(SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_ALLOW_HIGHDPI);
    SDL_Window* window = SDL_CreateWindow("Dear ImGui SDL2+OpenGL3 example", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, window_flags);
    SDL_GLContext gl_context = SDL_GL_CreateContext(window);
    SDL_GL_MakeCurrent(window, gl_context);
    SDL_GL_SetSwapInterval(0); // Disable vsync
    if (glewInit() != GLEW_OK) {
        std::cout << "Error initializing glew\n";
    }

    IMGUI_CHECKVERSION();
    ImGui::CreateContext();
    ImGuiIO& io = ImGui::GetIO();
    io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard;  // Enable Keyboard Controls
    //io.ConfigFlags |= ImGuiConfigFlags_NavEnableGamepad; // Enable Gamepad Controls

    // Setup Dear ImGui style
    ImGui::StyleColorsDark();
    //ImGui::StyleColorsClassic();

    // Setup Platform/Renderer backends
    ImGui_ImplSDL2_InitForOpenGL(window, gl_context);
    ImGui_ImplOpenGL3_Init(glsl_version);

    ImFont* font = io.Fonts->AddFontFromFileTTF("C:\\Windows\\Fonts\\Arial.ttf", 30.0f);
    ImVec4 clear_color = ImVec4(0.45f, 0.55f, 0.60f, 1.00f);

    // Main loop
    bool running = false;
    SDL_Event event;
    while (!running)
    {
        while (SDL_PollEvent(&event))
        {
            ImGui_ImplSDL2_ProcessEvent(&event);
            if (event.type == SDL_QUIT)
                running = true;
            if (event.type == SDL_WINDOWEVENT && event.window.event == SDL_WINDOWEVENT_CLOSE && event.window.windowID == SDL_GetWindowID(window))
                running = true;
        }

        // Start the Dear ImGui frame
        ImGui_ImplOpenGL3_NewFrame();
        ImGui_ImplSDL2_NewFrame(window);
        ImGui::NewFrame();
        {
            static std::string buf = "";
            ImGui::PushFont(font);
            ImGui::Begin("Window");
            ImGui::InputText("Hello", &buf);
            //std::cout << io.Fonts->Fonts.size() << std::endl;
            ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
            ImGui::End();
            ImGui::PopFont();
        }

        // Rendering
        glViewport(0, 0, (int)io.DisplaySize.x, (int)io.DisplaySize.y);
        glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
        glClear(GL_COLOR_BUFFER_BIT);
        ImGui::Render();
        ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
        SDL_GL_SwapWindow(window);
    }

    // Cleanup
    ImGui_ImplOpenGL3_Shutdown();
    ImGui_ImplSDL2_Shutdown();
    ImGui::DestroyContext();
    SDL_GL_DeleteContext(gl_context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
There are a few confusions I have here, starting with there being no SDL_Renderer anywhere in the code. I notice that the display draw color is handled by OpenGL, but the rendering is called via glClear(GL_COLOR_BUFFER_BIT); (I THINK). I'm unsure, though, how I could actually call any SDL2 functions such as SDL_RenderFillRect() with no SDL_Renderer? My best hint is this line:
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
where it's SDL_GL_SwapWindow(), but I believe this just also renders for OpenGL? I'm not really sure which line out of all the rendering actually does what. I would have thought ImGui::Render() would render all the ImGui things, but then there's ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());, and I'm not sure how SDL_GL_SwapWindow() ties in since I've already called glClear(). Additionally, why is there a function called ImGui::EndFrame() that is never called at the end of a frame in the sample code, while ImGui::NewFrame() is called each loop, and the same for ImGui_ImplOpenGL3_NewFrame(); and ImGui_ImplSDL2_NewFrame(window);? Can someone please explain some of these things? It's very confusing.
SDL_Renderer is something you need if you want to use the SDL API for drawing tasks, but it is not required if you just create the OpenGL context with SDL and do all the drawing directly with OpenGL.
but the rendering is called via glClear(GL_COLOR_BUFFER_BIT);
No, glClear clears part of the current render buffer, in this case the color buffer (see What is the purpose of GL_COLOR_BUFFER_BIT and GL_DEPTH_BUFFER_BIT?).
SDL_GL_SwapWindow(window); brings the contents of the current render buffer (the rendering) to the window (see SDL_GL_SwapWindow).
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); invokes the drawing of the ImGui components.
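To make the per-frame ordering concrete, here is the render portion of the loop above with each call annotated. This is an excerpt of the sample, not standalone code; all the names are the ones already defined there:

ImGui_ImplOpenGL3_NewFrame();       // backend: make sure GL-side resources (font texture, shaders) exist
ImGui_ImplSDL2_NewFrame(window);    // backend: hand SDL's input state, window size and delta time to ImGui
ImGui::NewFrame();                  // ImGui: start recording the UI; nothing is drawn yet

// ... ImGui::Begin/ImGui::End and widget calls only *describe* the UI ...

glClear(GL_COLOR_BUFFER_BIT);       // OpenGL: wipe the back buffer to the clear color
ImGui::Render();                    // ImGui: end the frame and build the draw lists (still no GL calls)
ImGui_ImplOpenGL3_RenderDrawData(   // backend: issue the actual OpenGL draw calls for those
    ImGui::GetDrawData());          //          draw lists into the back buffer
SDL_GL_SwapWindow(window);          // SDL: present the finished back buffer on screen

That also answers the ImGui::EndFrame() question: ImGui::Render() calls EndFrame() internally, so the sample never needs to call it explicitly.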

Does SFML support transparency?

Hi, I am trying to draw a translucent sphere using OpenGL/GLUT.
The easiest thing, I think, is to post the code. This is what I have at the moment:
glutInit(&argc, argv);

sf::ContextSettings settings;
settings.depthBits = 32;
settings.stencilBits = 0;
settings.antialiasingLevel = 0;

sf::Window window(sf::VideoMode(800, 600), "insert title", sf::Style::Default, settings);
window.setVerticalSyncEnabled(true);

glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0.0, 0.0, 0.0, 0.0);

bool running = true;
while (running)
{
    sf::Event e;
    while (window.pollEvent(e))
    {
        if (e.type == sf::Event::Closed)
        {
            running = false;
        }
        if (e.type == sf::Event::Resized)
        {
            glViewport(0, 0, e.size.width, e.size.height);
            glMatrixMode(GL_PROJECTION);
            glLoadIdentity();
            gluPerspective(45.0, (double)e.size.width / (double)e.size.height, 0.1, 2000.0);
            gluLookAt(0,0,0, 1,0,0, 0,1,0);
            glMatrixMode(GL_MODELVIEW);
        }
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glColor4d(1.0, 0.0, 0.0, 0.5);
    glutWireSphere(0.5, 4, 4);
    window.display();
}
I ran this thing under optirun and without optirun... no transparency, though!
I read somewhere that the context must have an alpha buffer. Does SFML support this?
Your code is very confused.
FreeGLUT and SFML do the same thing with regard to OpenGL: they create windows with OpenGL contexts. You should not be using them both in the same application.
Also, you can't call OpenGL functions until you've created the window, and those functions only affect the current context; they will have no effect on the new context you create with SFML. Your code simply doesn't make sense.
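As an illustration, here is a hedged sketch of how the setup could look with SFML alone: the window (and its GL context) is created first, GL state is set afterwards, and the glutWireSphere call is replaced by plain immediate-mode OpenGL (a translucent quad, purely for demonstration):

#include <SFML/Window.hpp>
#include <SFML/OpenGL.hpp>

int main()
{
    // Create the window (and its GL context) BEFORE any GL call
    sf::ContextSettings settings;
    settings.depthBits = 24;
    sf::Window window(sf::VideoMode(800, 600), "translucency", sf::Style::Default, settings);
    window.setVerticalSyncEnabled(true);

    // Now a context exists, so these calls actually take effect
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glClearColor(0.f, 0.f, 0.f, 1.f);

    bool running = true;
    while (running)
    {
        sf::Event e;
        while (window.pollEvent(e))
            if (e.type == sf::Event::Closed)
                running = false;

        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // A half-transparent red quad, blended over the background
        glColor4d(1.0, 0.0, 0.0, 0.5);
        glBegin(GL_QUADS);
        glVertex2d(-0.5, -0.5);
        glVertex2d( 0.5, -0.5);
        glVertex2d( 0.5,  0.5);
        glVertex2d(-0.5,  0.5);
        glEnd();

        window.display();
    }
    return 0;
}

Note that blending like this needs alpha in the framebuffer's source color, not an alpha channel in the window itself, so no special context setting is required for it.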

SDL OS X Eclipse C++ display error

I've got a problem running an SDL app in Eclipse under OS X.
#include <SDL/SDL_opengl.h>
#include <SDL/SDL.h>
#include <SDL_ttf/SDL_ttf.h>
#include <SDL_image/SDL_image.h>
#include <iostream>

int main(int argc, char* argv[]){
    int error = SDL_Init(SDL_INIT_EVERYTHING);
    std::cout << "error " << error << std::endl;

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
    SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);

    Uint32 flags = SDL_OPENGL | SDL_HWSURFACE | SDL_HWPALETTE | SDL_DOUBLEBUF;
    SDL_Surface *drawContext = SDL_SetVideoMode(1024, 768, 16, flags);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0.0f, 1024, 768, 0.0f, 0.0f, 1000.0f);
    glMatrixMode(GL_MODELVIEW);

    while(true){
        glFinish();
        SDL_GL_SwapBuffers();
    }
}
This gets weird output. It only happens when using the SDL_OPENGL flag.
Any ideas?
You never bother to clear the default framebuffer to a known value and the GL implementation isn't required to give you a cleared buffer.
OpenGL 4.3 spec, page 254, paragraph 1 (emphasis mine):
The default framebuffer is initially used as the draw and read framebuffer, and the initial state of all provided bitplanes is undefined. The format and encoding of buffers in the draw and read framebuffers can be queried as described in section 9.2.3.
Clear the framebuffer sometime before you swap:
while(true)
{
    glClear( GL_COLOR_BUFFER_BIT );
    // draw stuff
    SDL_GL_SwapBuffers();
}

OpenGL/GLUT: Creating a window after glutMainLoop()

I am developing a game in OpenGL/GLUT and I need to open a new window to show the score when the game is won.
In order to do this, I will call glutCreateWindow() and register the callbacks after calling glutMainLoop().
Is there a problem with this? How should I do it properly?
Is there a problem with this?
Yes.
Why don't you simply draw the results in the same window as the game?
Why are you using GLUT in the first place? It's not a very good framework for games. Better use GLFW or SDL.
How should I do it properly?
By adding a small GUI system to your engine, that allows you to overlay the screen with stats (like a HUD) and a score screen.
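As an illustration of that suggestion, here is a minimal, hedged sketch of a score overlay drawn into the same window using GLUT's bitmap fonts. drawScoreOverlay, score, width and height are hypothetical names, and the rest of the game's rendering is assumed to happen in the display callback:

#include <GL/glut.h>
#include <cstdio>

// Hypothetical game state; in a real engine these would live elsewhere
static int score = 42;
static int width = 400, height = 400;

// Draw "Score: N" in screen space on top of whatever the game rendered
static void drawScoreOverlay()
{
    char text[32];
    std::snprintf(text, sizeof(text), "Score: %d", score);

    // Switch to a 2D screen-space projection, saving the 3D matrices
    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    gluOrtho2D(0, width, 0, height);
    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();

    glColor3f(1.f, 1.f, 1.f);
    glRasterPos2i(10, height - 30); // near the top-left corner
    for (const char* c = text; *c; ++c)
        glutBitmapCharacter(GLUT_BITMAP_HELVETICA_18, *c);

    // Restore the saved matrices
    glPopMatrix();
    glMatrixMode(GL_PROJECTION);
    glPopMatrix();
    glMatrixMode(GL_MODELVIEW);
}

static void display()
{
    glClear(GL_COLOR_BUFFER_BIT);
    // ...render the game scene here...
    drawScoreOverlay();
    glutSwapBuffers();
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE);
    glutInitWindowSize(width, height);
    glutCreateWindow("Game");
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}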
You will need two display callback functions, display() and display2(), one for each window, plus window = glutCreateWindow("Window 1"); and window2 = glutCreateWindow("Window 2");.
Code example:
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <GL/glut.h>

int window2 = 0, window = 0, width = 400, height = 400;

void display(void)
{
    glClearColor(0.0, 1.0, 1.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glLoadIdentity();
    printf("display1\n");
    glFlush();
}

void display2(void)
{
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    glLoadIdentity();
    printf("display2\n");
    glFlush();
}

void reshape(int w, int h)
{
    glViewport(0, 0, (GLsizei)w, (GLsizei)h);
    glutPostRedisplay();
}

int main(int argc, char **argv)
{
    // Initialization stuff
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB);
    glutInitWindowSize(width, height);

    // Create main window
    window = glutCreateWindow("Window 1");
    glutDisplayFunc(display);
    glutReshapeFunc(reshape);

    glutInitWindowPosition(100, 100);

    // Create second window
    window2 = glutCreateWindow("Window 2");
    glutDisplayFunc(display2);
    glutReshapeFunc(reshape);

    // Enter GLUT main loop and wait for events
    glutMainLoop();
    return 0;
}
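Note the design here: glutCreateWindow() makes the newly created window current, and glutDisplayFunc()/glutReshapeFunc() always register callbacks for the current window, which is why each window gets its own display callback. Later you can switch between the two with glutSetWindow(), for example to post a redisplay to one specific window.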