I've got a problem running an SDL app in Eclipse under OS X.
#include <SDL/SDL_opengl.h>
#include <SDL/SDL.h>
#include <SDL_ttf/SDL_ttf.h>
#include <SDL_image/SDL_image.h>
#include <iostream>
int main(int argc, char* argv[]) {
    int error = SDL_Init(SDL_INIT_EVERYTHING);
    std::cout << "error " << error << std::endl;

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
    SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
    SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);

    Uint32 flags = SDL_OPENGL | SDL_HWSURFACE | SDL_HWPALETTE | SDL_DOUBLEBUF;
    SDL_Surface* drawContext = SDL_SetVideoMode(1024, 768, 16, flags);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0.0f, 1024, 768, 0.0f, 0.0f, 1000.0f);
    glMatrixMode(GL_MODELVIEW);

    while (true) {
        glFinish();
        SDL_GL_SwapBuffers();
    }
}
This produces weird output. It only happens when using the SDL_OPENGL flag. Any ideas?
You never clear the default framebuffer to a known value, and the GL implementation isn't required to give you a cleared buffer.
OpenGL 4.3 spec, page 254, paragraph 1 (emphasis mine):
The default framebuffer is initially used as the draw and read framebuffer,
and the initial state of all provided bitplanes is undefined. The format and encoding of buffers in the draw and read framebuffers can be queried as described in section 9.2.3.
Clear the framebuffer sometime before you swap:
while (true)
{
    glClear(GL_COLOR_BUFFER_BIT);
    // draw stuff
    SDL_GL_SwapBuffers();
}
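The initial clear color is black; if you want a known color instead, set it once before the loop. A minimal sketch (the gray value here is just an example):

glClearColor(0.2f, 0.2f, 0.2f, 1.0f); // any known value will do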
I'm trying to use all three libraries together, but I'm quite confused by the sample code and can't quite follow the documentation. This is the code, and I'll explain my confusions below:
#include <iostream>
#include <string>
#include <SDL2/SDL.h>
#include <GL/glew.h>
#include <imgui/imgui.h>
#include <imgui/imgui_stdlib.h>
#include <imgui/imgui_impl_sdl.h>
#include <imgui/imgui_impl_opengl3.h>

// Main code
int main(int argc, char* argv[])
{
    if (SDL_Init(SDL_INIT_VIDEO) != 0)
    {
        std::cout << SDL_GetError() << std::endl;
        return -1;
    }

    // GL 3.0 + GLSL 130
    const char* glsl_version = "#version 130";
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, 0);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);

    // Create window with graphics context
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
    SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
    SDL_WindowFlags window_flags = (SDL_WindowFlags)(SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_ALLOW_HIGHDPI);
    SDL_Window* window = SDL_CreateWindow("Dear ImGui SDL2+OpenGL3 example", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, window_flags);
    SDL_GLContext gl_context = SDL_GL_CreateContext(window);
    SDL_GL_MakeCurrent(window, gl_context);
    SDL_GL_SetSwapInterval(0); // Disable vsync

    if (glewInit() != GLEW_OK) {
        std::cout << "Error initializing glew\n";
    }

    IMGUI_CHECKVERSION();
    ImGui::CreateContext();
    ImGuiIO& io = ImGui::GetIO();
    io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard; // Enable Keyboard Controls
    //io.ConfigFlags |= ImGuiConfigFlags_NavEnableGamepad; // Enable Gamepad Controls

    // Setup Dear ImGui style
    ImGui::StyleColorsDark();
    //ImGui::StyleColorsClassic();

    // Setup Platform/Renderer backends
    ImGui_ImplSDL2_InitForOpenGL(window, gl_context);
    ImGui_ImplOpenGL3_Init(glsl_version);

    ImFont* font = io.Fonts->AddFontFromFileTTF("C:\\Windows\\Fonts\\Arial.ttf", 30.0f);
    ImVec4 clear_color = ImVec4(0.45f, 0.55f, 0.60f, 1.00f);

    // Main loop
    bool running = false;
    SDL_Event event;
    while (!running)
    {
        while (SDL_PollEvent(&event))
        {
            ImGui_ImplSDL2_ProcessEvent(&event);
            if (event.type == SDL_QUIT)
                running = true;
            if (event.type == SDL_WINDOWEVENT && event.window.event == SDL_WINDOWEVENT_CLOSE && event.window.windowID == SDL_GetWindowID(window))
                running = true;
        }

        // Start the Dear ImGui frame
        ImGui_ImplOpenGL3_NewFrame();
        ImGui_ImplSDL2_NewFrame(window);
        ImGui::NewFrame();

        {
            static std::string buf = "";
            ImGui::PushFont(font);
            ImGui::Begin("Window");
            ImGui::InputText("Hello", &buf);
            //std::cout << io.Fonts->Fonts.size() << std::endl;
            ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
            ImGui::End();
            ImGui::PopFont();
        }

        // Rendering
        glViewport(0, 0, (int)io.DisplaySize.x, (int)io.DisplaySize.y);
        glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
        glClear(GL_COLOR_BUFFER_BIT);
        ImGui::Render();
        ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
        SDL_GL_SwapWindow(window);
    }

    // Cleanup
    ImGui_ImplOpenGL3_Shutdown();
    ImGui_ImplSDL2_Shutdown();
    ImGui::DestroyContext();
    SDL_GL_DeleteContext(gl_context);
    SDL_DestroyWindow(window);
    SDL_Quit();
    return 0;
}
There are a few things that confuse me here, starting with there being no SDL_Renderer anywhere in the code. I notice that the display draw color is handled by OpenGL, but the rendering is called via glClear(GL_COLOR_BUFFER_BIT); (I THINK). I'm unsure, though, how I could then call any SDL2 functions such as SDL_RenderFillRect() with no SDL_Renderer? My best hint is these lines:
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
where it's SDL_GL_SwapWindow(), but I believe this also just renders for OpenGL? I'm not really sure which of all the rendering lines actually does what. I would have thought ImGui::Render() would render all the ImGui things, but then there's ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());, and I'm not sure how SDL_GL_SwapWindow() ties in since I've already called glClear(). Additionally, why is there a function called ImGui::EndFrame() that isn't called at the end of a frame in the sample code, while ImGui::NewFrame() is called each loop (and likewise ImGui_ImplOpenGL3_NewFrame(); and ImGui_ImplSDL2_NewFrame(window);)? Can someone please explain some of these things? It's very confusing.
SDL_Renderer is something you need if you want to use the SDL API for drawing tasks, but it is not required if you just create the OpenGL context with SDL and do all the drawing directly with OpenGL.
but the rendering is called via glClear(GL_COLOR_BUFFER_BIT);
No, glClear clears part of the current render buffer, in this case, the color buffer (see What is the purpose of GL_COLOR_BUFFER_BIT and GL_DEPTH_BUFFER_BIT?).
SDL_GL_SwapWindow(window); brings the contents of the current render buffer (the rendering) to the window (see SDL_GL_SwapWindow).
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); invokes the drawing of the ImGui components.
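So if you want something like SDL_RenderFillRect() here, you would express it with OpenGL calls instead. As a minimal sketch, one way to fill a screen-space rectangle without setting up any shaders is the scissor test (the coordinates and color below are made up for illustration; note that glScissor measures from the bottom-left corner of the window):

glEnable(GL_SCISSOR_TEST);
glScissor(100, 100, 200, 150);        // x, y (from bottom-left), width, height
glClearColor(1.0f, 0.0f, 0.0f, 1.0f); // fill color
glClear(GL_COLOR_BUFFER_BIT);         // clears only the scissor rectangle
glDisable(GL_SCISSOR_TEST);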
I want to use OpenGL 3.1.
I'm using a MacBook Pro with two graphics cards: an NVIDIA GeForce GT 650M (1024 MB) and an Intel HD Graphics 4000 (1536 MB). Both support up to OpenGL 4.1.
Previously I was able to draw a triangle; however, my program was using version 2.1. Therefore I added SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);. However, now the triangle is no longer drawn.
#include <string>
#include <iostream>
#include <SDL2/SDL.h>
#define GL3_PROTOTYPES 1
#include "../include/GL3/gl3.h"

int main(int argc, const char *argv[]) {
    // Initialize the SDL
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        std::cout << "Failed to initialize the SDL: " << SDL_GetError() << std::endl;
        SDL_Quit();
        return -1;
    }

    // Configure the SDL to use OpenGL 3.1
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
    // ======= HERE =======
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
    // ====================
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

    SDL_Window* window = SDL_CreateWindow("Triangle Test", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 800, 600, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
    if (window == 0) {
        std::cout << "Error when creating the window: " << SDL_GetError() << std::endl;
        SDL_Quit();
        return -1;
    }

    // Create the OpenGL context
    SDL_GLContext contextOpenGL = SDL_GL_CreateContext(window);

    // Initialization may fail
    if (contextOpenGL == 0) {
        std::cout << SDL_GetError() << std::endl;
        SDL_DestroyWindow(window);
        SDL_Quit();
        return -1;
    }

    SDL_Event events;
    bool end = false;

    // Define the vertices of our triangle
    static const GLfloat vertices[] = { 0.0,  1.0,  // top point
                                       -0.5,  0.0,  // bottom-left point
                                        0.5,  0.0}; // bottom-right point
    const int TRIANGLE_IDX = 0;

    while (!end) {
        SDL_WaitEvent(&events);
        if (events.window.event == SDL_WINDOWEVENT_CLOSE) {
            end = true;
        }

        // Clear the screen
        glClear(GL_COLOR_BUFFER_BIT);

        // Send vertices to OpenGL
        glVertexAttribPointer(TRIANGLE_IDX, 2, GL_FLOAT, GL_FALSE, 0, vertices);

        // Activate our vertex array
        glEnableVertexAttribArray(TRIANGLE_IDX);

        // Draw the points passed previously
        glDrawArrays(GL_TRIANGLES, 0, 3);
        glDisableVertexAttribArray(TRIANGLE_IDX);

        // Refresh the screen
        SDL_GL_SwapWindow(window);
    }

    return 0;
}
I first tried to use glGenBuffers, glBindBuffer, and glBufferData, but I could not manage to make it work.
The Fixed Function Pipeline has been removed from core OpenGL 3.1 and above.
You will have to use shaders instead. This site has a nice example of how to use them.
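To give a rough idea of what that involves, here is a minimal sketch of a shader-based setup (error checking omitted; #version 140 is the GLSL release that shipped with OpenGL 3.1, and attribute index 0 matches the TRIANGLE_IDX used above):

const char* vsSrc =
    "#version 140\n"
    "in vec2 position;\n"
    "void main() { gl_Position = vec4(position, 0.0, 1.0); }\n";
const char* fsSrc =
    "#version 140\n"
    "out vec4 fragColor;\n"
    "void main() { fragColor = vec4(1.0, 0.5, 0.0, 1.0); }\n";

GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vsSrc, NULL);
glCompileShader(vs);                       // check GL_COMPILE_STATUS in real code

GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fsSrc, NULL);
glCompileShader(fs);

GLuint prog = glCreateProgram();
glAttachShader(prog, vs);
glAttachShader(prog, fs);
glBindAttribLocation(prog, 0, "position"); // matches TRIANGLE_IDX == 0
glLinkProgram(prog);                       // check GL_LINK_STATUS in real code
glUseProgram(prog);

// A core profile also wants the vertex data in a VBO, bound through a VAO,
// instead of a client-memory pointer:
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(0);

// The render loop can then keep calling glDrawArrays(GL_TRIANGLES, 0, 3).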
I'm using OpenGL in C++ to draw '+' symbols. My screen has a resolution of 1920x1080, but unfortunately my method of drawing (glBegin(GL_LINES)) only works on a 1080x1080 rectangle. For x > screen_height everything gets cut off. However, I can still place text (using GLUT) on the full 1920x1080 surface. Could you help me find which setting is at fault?
I attached a code snippet. It may not compile, as I only included what is used for drawing the '+' symbol.
Much appreciated. Thank you in advance.
// Libraries needed for OpenGL and strings
#include <GL/glut.h>
#include <GLFW/glfw3.h>
// Header file required for this cpp file
#include "window2.hxx"

int main(int argc, char *argv[])
{
    // Start GLFW as main OpenGL frontend
    GLFWwindow* window;
    if (!glfwInit())
    {
        fprintf(stderr, "Failed to initialize GLFW\n");
        exit(EXIT_FAILURE);
    }

    // GLUT is used later for text rendering, so initialize it as well (freeglut3)
    glutInit(&argc, argv);

    // Check the resolution of the monitor and pass it to the create window function
    const GLFWvidmode* mode = glfwGetVideoMode(glfwGetPrimaryMonitor());
    screen_width = mode->width;
    screen_height = mode->height;
    window = glfwCreateWindow(screen_width, screen_height, "LearnOpenGL", glfwGetPrimaryMonitor(), NULL);
    if (!window)
    {
        fprintf(stderr, "Failed to open GLFW window\n");
        glfwTerminate();
        exit(EXIT_FAILURE);
    }
    glfwMakeContextCurrent(window);
    glfwSwapInterval(1);

    // Set up view
    glViewport(0, 0, screen_height, screen_width);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // See https://www.opengl.org/sdk/docs/man2/xhtml/glOrtho.xml
    glOrtho(0.0, screen_height, 0.0, screen_width, 0.0, 1.0); // this creates a canvas for 2D drawing on

    x_1 = 500;
    y_1 = 100;

    while (!glfwWindowShouldClose(window)) {
        // Draw gears
        render_loop();

        // Swap buffers
        glfwSwapBuffers(window);
        glfwPollEvents();
    } // glfw while loop
}

// OpenGL draw function
void render_loop()
{
    // White background
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glPointSize(10);
    glLineWidth(1);

    // Push matrix
    glPushMatrix();
    glColor3f(0.0, 0.0, 1.0);
    glBegin(GL_LINES);
    glVertex2i(x_1 - 10, y_1);
    glVertex2i(x_1 + 10, y_1);
    glVertex2i(x_1, y_1 - 10);
    glVertex2i(x_1, y_1 + 10);
    glEnd();

    // Pop matrix
    glPopMatrix();
}
I think you've got your dimensions the wrong way round:
// set up view
glViewport( 0, 0, screen_height, screen_width );
...
glOrtho(0.0,screen_height,0.0,screen_width,0.0,1.0);
Try
glViewport( 0, 0, screen_width, screen_height);
...
glOrtho(0.0,screen_width,0.0,screen_height,0.0,1.0);
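As a sanity check after the fix, you can query what was actually set; a small sketch (assuming a current GL context and <cstdio>):

GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp); // vp = {x, y, width, height}
printf("viewport: %d %d %d %d\n", vp[0], vp[1], vp[2], vp[3]);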
I wrote a simple parser for the ASCII STL format. When I try to render the triangles with the supplied normals, the resulting object is missing many faces:
This is how it should look:
What I already tried:
explicitly disabled backface culling (though it shouldn't have been active before)
ensured that the depth buffer is enabled
Here is a minimal sample program which reproduces the error:
#include <fstream>
#include <iostream>
#include <SDL2/SDL.h>
#include <SDL2/SDL_main.h>
#include <SDL2/SDL_render.h>
#include <SDL2/SDL_opengl.h>

int main(int argc, char **argv) {
    SDL_Init(SDL_INIT_VIDEO);
    int screen_w = 1280, screen_h = 720;
    SDL_Window * win = SDL_CreateWindow("test", 20, 20, screen_w, screen_h,
                                        SDL_WINDOW_OPENGL);
    SDL_GLContext glcontext = SDL_GL_CreateContext(win);

    STLParser stlparser;
    std::ifstream file(".\\logo.stl");
    stlparser.parseAscii(file);

    const auto& ndata = stlparser.getNData();
    const auto& vdata = stlparser.getVData();
    std::cout << "number of facets: " << ndata.size() << std::endl;

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);

    glMatrixMode(GL_PROJECTION | GL_MODELVIEW);
    glLoadIdentity();
    glScalef(1.f, -1.f, 1.f);
    glOrtho(0, screen_w, 0, screen_h, -screen_w, screen_w);

    glClearDepth(1.0f);
    glDepthFunc(GL_LEQUAL);
    glEnable(GL_DEPTH_TEST);
    glDisable(GL_CULL_FACE);

    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_VERTEX_ARRAY);
    glNormalPointer(GL_FLOAT, 0, ndata.data());
    glVertexPointer(3, GL_FLOAT, 0, vdata.data());

    SDL_Event event;
    bool quit = false;
    while (!quit) {
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
                case SDL_QUIT: quit = true; break;
            }
        }

        // Drawing
        glClearColor(255, 255, 255, 255);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glTranslatef(screen_w / 2, 0, 0);
        glRotatef(0.5, 0, 1, 0);
        glTranslatef(-screen_w / 2, 0, 0);
        glPushMatrix();
        glTranslatef(screen_w / 2, screen_h / 2, 0);
        glColor3f(0.5, 0.5, 0);
        glDrawArrays(GL_TRIANGLES, 0, vdata.size());
        glPopMatrix();
        SDL_GL_SwapWindow(win);
        SDL_Delay(10);
    }

    SDL_DestroyWindow(win);
    SDL_Quit();
    return 0;
}
The STLParser methods getNData() and getVData() have the following signatures:
const std::vector<std::array<float,3>>& getNData() const;
const std::vector<std::array<std::array<float,3>,3>>& getVData() const;
STLParser output should be correct, but I can provide the sources as well if needed.
What am I doing wrong?
You should change
glDrawArrays(GL_TRIANGLES, 0, vdata.size());
to
glDrawArrays(GL_TRIANGLES, 0, 3 * vdata.size());
That is, the count should be the vertex count, not the triangle count.
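Given the getVData() signature above, each element of vdata is one triangle holding three vertices, so a sketch making the relationship explicit:

// vdata.size() counts triangles; glDrawArrays counts vertices.
const GLsizei vertexCount = static_cast<GLsizei>(3 * vdata.size());
glDrawArrays(GL_TRIANGLES, 0, vertexCount);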
I am trying to use SDL as a window manager for OpenGL. I looked into using the native Windows API, but it looked too confusing.
With that being said, I have a class Window in which I would like to wrap all the SDL stuff for my window management right now. I figure it will let me swap out the window management later if I find I do not want to use SDL.
I am guessing that a lot of OpenGL initialization code only needs to be run one time.
if (SDL_Init(SDL_INIT_EVERYTHING) < 0) {
    exit(0x1);
}
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
SDL_GL_SetAttribute(SDL_GL_ACCUM_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, 1);
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, 2);
Then in the class constructor I can create the window with
Window::Window(int winW, int winH) {
    if ((Surf_Display = SDL_SetVideoMode(winW, winH, 32, SDL_HWSURFACE | SDL_GL_DOUBLEBUFFER | SDL_OPENGL | SDL_RESIZABLE)) == NULL) {
        exit(2);
    }
    glClearColor(0, 0, 0, 0);
    glClearDepth(1.0f);
    glViewport(0, 0, winW, winH);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, winW, winH, 0, 1, -1);
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glLoadIdentity();
}
I'm just not sure how to go about doing this. If I put the code before I define the class in the header, does this achieve the desired result?
// init code
class Window { };
The simplest thing would be to put that initialisation code into a function and then just call this function from main:
/* header */
void init_window_management(void);

/* some source file */
void init_window_management(void) {
    // your code
}

/* main file */
// ... also include that header ...
int main(int argc, char ** argv) {
    // ...
    init_window_management();
    // ... use instances of the window class
}
Then there's also std::call_once.
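For instance, a minimal sketch with std::call_once (the flag name here is made up for illustration):

#include <mutex>

static std::once_flag window_init_flag; // hypothetical flag

Window::Window(int winW, int winH) {
    // Runs the one-time setup exactly once, no matter how many
    // Window instances get constructed.
    std::call_once(window_init_flag, init_window_management);
    // ... per-window setup as before ...
}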
If I put the code before I define the class in the header, does this achieve the desired result?
No. A header is for function and class declarations. Code to execute lives in (member) functions, which are then called (ultimately) via the main function.