Execute class code once C++ - c++

I am trying to use SDL as a window manager for OpenGL. I looked into using the Windows native API, but it looked too confusing.
With that being said, I have a class Window which I would like to wrap all the SDL stuff in for my windows management right now. Figure it will let me swap out windows management later if I find I do not want to use SDL.
I am guessing that a lot of openGL initialization code only needs to be run one time.
// One-time SDL / OpenGL setup.  Must run before any window is created:
// SDL_GL_SetAttribute() only affects surfaces created afterwards.
if(SDL_Init(SDL_INIT_EVERYTHING) < 0 ) {
exit(0x1);
}
// 8 bits per colour channel, 32-bit framebuffer, 16-bit depth buffer.
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
// Accumulation buffer channels (legacy GL feature; rarely needed today).
SDL_GL_SetAttribute(SDL_GL_ACCUM_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ACCUM_ALPHA_SIZE, 8);
// Request 2x multisample anti-aliasing.
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, 1);
SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, 2);
Then in the class constructor I can create the window with
/**
 * Construct the window: create the SDL display surface with an OpenGL
 * context and set up a pixel-aligned 2D orthographic projection.
 * Assumes SDL_Init() and the one-time SDL_GL_SetAttribute() calls have
 * already run.  Exits the process if the video mode cannot be set.
 *
 * winW/winH: initial client-area size in pixels.
 */
Window::Window(int winW, int winH) {
    // BUG FIX: SDL_GL_DOUBLEBUFFER is an SDL_GLattr meant for
    // SDL_GL_SetAttribute(), not a SDL_SetVideoMode() surface flag, and
    // SDL_HWSURFACE is ignored for OpenGL windows.  Request double
    // buffering via the attribute and pass only valid flags.
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    if ((Surf_Display = SDL_SetVideoMode(winW, winH, 32,
                                         SDL_OPENGL | SDL_RESIZABLE)) == NULL) {
        exit(2); // cannot continue without a display surface
    }
    glClearColor(0, 0, 0, 0); // clear to opaque black
    glClearDepth(1.0f);
    glViewport(0, 0, winW, winH);
    // Top-left origin, 1 GL unit == 1 pixel.
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0, winW, winH, 0, 1, -1);
    glMatrixMode(GL_MODELVIEW);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); // standard alpha blending
    glLoadIdentity();
}
I'm just not sure how to go about doing this. If I put the code before I define the class in the header, does this achieve the desired result?
;init code
;class window { };

The simplest thing would be to put that initialisation code into a function and then just call this function from main:
/* header */
/* Declare the one-time init entry point once, in a header. */
void init_window_management (void);
/* some source file */
/* Define it exactly once, in a .cpp file. */
void init_window_management (void) {
// your code
}
/* main file */
// ... also include that header ...
int main(int argc, char ** argv) {
// ...
init_window_management(); /* called exactly once, before any Window exists */
// ... use instances of the window class
}
Then there's also std::call_once.
If I put the code before I define the class in the header, does this achieve the desired result?
No. A header is for function and class declarations. Code to execute lives in (member) functions, these are then called (ultimately) via the main function.

Related

OpenGL renderer with ImGui and SDL2

I'm trying to use all 3 libraries or whatnot but i'm quite confused by the sample code and I can't quite follow the documentation. This is the code and ill explain my confusions below:
#include <iostream>
#include <string>
#include <SDL2/SDL.h>
#include <GL/glew.h>
#include <imgui/imgui.h>
#include <imgui/imgui_stdlib.h>
#include <imgui/imgui_impl_sdl.h>
#include <imgui/imgui_impl_opengl3.h>
// Main code
int main(int argc, char* argv[])
{
if (SDL_Init(SDL_INIT_VIDEO) != 0)
{
std::cout << SDL_GetError() << std::endl;
return -1;
}
// GL 3.0 + GLSL 130
const char* glsl_version = "#version 130";
SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, 0);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0);
// Create window with graphics context
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
SDL_WindowFlags window_flags = (SDL_WindowFlags)(SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_ALLOW_HIGHDPI);
SDL_Window* window = SDL_CreateWindow("Dear ImGui SDL2+OpenGL3 example", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 720, window_flags);
SDL_GLContext gl_context = SDL_GL_CreateContext(window);
SDL_GL_MakeCurrent(window, gl_context);
SDL_GL_SetSwapInterval(0); // Disable vsync
if (glewInit() != GLEW_OK) {
std::cout << "Error initializing glew\n";
}
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO();
io.ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard; // Enable Keyboard Controls
//io.ConfigFlags |= ImGuiConfigFlags_NavEnableGamepad; // Enable Gamepad Controls
// Setup Dear ImGui style
ImGui::StyleColorsDark();
//ImGui::StyleColorsClassic();
// Setup Platform/Renderer backends
ImGui_ImplSDL2_InitForOpenGL(window, gl_context);
ImGui_ImplOpenGL3_Init(glsl_version);
ImFont* font = io.Fonts->AddFontFromFileTTF("C:\\Windows\\Fonts\\Arial.ttf", 30.0f);
ImVec4 clear_color = ImVec4(0.45f, 0.55f, 0.60f, 1.00f);
// Main loop
bool running = false;
SDL_Event event;
while (!running)
{
while (SDL_PollEvent(&event))
{
ImGui_ImplSDL2_ProcessEvent(&event);
if (event.type == SDL_QUIT)
running = true;
if (event.type == SDL_WINDOWEVENT && event.window.event == SDL_WINDOWEVENT_CLOSE && event.window.windowID == SDL_GetWindowID(window))
running = true;
}
// Start the Dear ImGui frame
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplSDL2_NewFrame(window);
ImGui::NewFrame();
{
static std::string buf = "";
ImGui::PushFont(font);
ImGui::Begin("Window");
ImGui::InputText("Hello", &buf);
//std::cout << io.Fonts->Fonts.size() << std::endl;
ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
ImGui::End();
ImGui::PopFont();
}
// Rendering
glViewport(0, 0, (int)io.DisplaySize.x, (int)io.DisplaySize.y);
glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
glClear(GL_COLOR_BUFFER_BIT);
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
}
// Cleanup
ImGui_ImplOpenGL3_Shutdown();
ImGui_ImplSDL2_Shutdown();
ImGui::DestroyContext();
SDL_GL_DeleteContext(gl_context);
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}
There are a few confusions I have here, starting with there being no SDL_Renderer anywhere within the code. I notice that the display draw color is handled by OpenGL, but the rendering is called via glClear(GL_COLOR_BUFFER_BIT); (I THINK). I'm unsure, though, how I could actually then call any SDL2 functions such as SDL_RenderFillRect() with no SDL_Renderer ? My best hint is this line:
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
SDL_GL_SwapWindow(window);
where it's SDL_GL_SwapWindow(), but I believe this just also renders for OpenGL? I'm not really sure which line out of all the rendering actually does what. I mean, I would have thought ImGui::Render() would render all ImGui things, but then there's an ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); and then I'm not sure how SDL_GL_SwapWindow ties in since I've already called glClear(). Additionally, why is there a function called ImGui::EndFrame() that is not called in the sample code at the end of a frame, and then there is ImGui::NewFrame() for each loop, and the same for ImGui_ImplOpenGL3_NewFrame(); ImGui_ImplSDL2_NewFrame(window);? Can someone please explain some of these things — it's very confusing.
SDL_Renderer is something you need if you want to use the SDL API for drawing tasks, but it is not required if you just create the OpenGL context with SDL and do all the drawing directly with OpenGL.
but the rendering is called via glClear(GL_COLOR_BUFFER_BIT);
No, glClear clears part of the current render buffer — in this case, the color buffer (see: What is the purpose of GL_COLOR_BUFFER_BIT and GL_DEPTH_BUFFER_BIT?).
SDL_GL_SwapWindow(window); brings the contents of the current render buffer (the rendering) to the window SDL_GL_SwapWindow
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData()); invokes the drawing of the ImGui components.

OpenGL with Gtk+, shapes are not being drawn despite background being cleared

I am trying to get OpenGL to work with GTK+. It seemed to be working, since I was able to clear the background color. However, when I went to draw something, it is not there. Am I missing something? I put the eye at 10, 10, 10 and I am looking at the origin. I should see a black triangle near the origin.
#include <gtk/gtk.h>
#include <GL/gl.h>
#include <GL/glu.h>
#include <iostream>
GtkWidget* wnd;
GtkWidget* glarea;
// "render" signal handler for the GtkGLArea: sets up a perspective camera
// at (10,10,10) looking at the origin and draws one black triangle.
//
// NOTE(review): GtkGLArea creates a *core-profile* GL context, where the
// fixed-function API used below (glMatrixMode, gluPerspective, gluLookAt,
// glBegin/glEnd, ...) does not exist -- so even with a valid projection
// nothing will appear until this is ported to shaders.
static gboolean render(GtkGLArea *area, GdkGLContext *context)
{
    int w = gtk_widget_get_allocated_width(GTK_WIDGET(area));
    int h = gtk_widget_get_allocated_height(GTK_WIDGET(area));
    glViewport(0, 0, w, h);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // BUG FIX: fovy == 180 makes gluPerspective compute cot(90 deg) == 0,
    // i.e. a degenerate projection matrix in which nothing can ever be
    // visible.  Use a sane vertical field of view instead.
    gluPerspective(60.0, (double)w / (double)h, 0.1, 100.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(10, 10, 10, 0, 0, 0, 0, 1, 0);
    glClearColor(1, 1, 1, 0); // white background
    glClear(GL_COLOR_BUFFER_BIT);
    glColor3f(0, 0, 0);       // black triangle
    glBegin(GL_TRIANGLES);
    glVertex3f(0, 0, 0);
    glVertex3f(-1, 2, -1);
    glVertex3f(1, 3, 2);
    glEnd();
    return TRUE; // TRUE = we handled the draw; GTK stops here
}
// Build a top-level window containing a GtkGLArea, wire up the destroy and
// render signals, and enter the GTK main loop.
int main(int argc, char *argv[])
{
    gtk_init(&argc, &argv);
    wnd = gtk_window_new(GTK_WINDOW_TOPLEVEL);
    glarea = gtk_gl_area_new();
    gtk_container_add(GTK_CONTAINER(wnd), glarea);
    // FIX: wrap the handler in G_CALLBACK like the "render" connection
    // below; passing the bare function pointer relies on an implicit cast
    // that C++ rejects and that hides signature mismatches.
    g_signal_connect(wnd, "destroy", G_CALLBACK(gtk_main_quit), 0);
    g_signal_connect(glarea, "render", G_CALLBACK(render), NULL);
    gtk_widget_show_all(wnd);
    gtk_main(); // blocks until gtk_main_quit fires
    return 0;
}
Source: Emanuele Bassi's blog - GTK+ developer
[...] The OpenGL support inside GTK+ requires core GL profiles, and thus it won’t work with the fixed pipeline API that was common until OpenGL 3.2 and later versions. this means that you won’t be able to use API like glRotatef(), or glBegin()/glEnd() pairs, or any of that stuff.
Solution: drop the fixed function pipeline.

Using SDL and openGL drawing primitives does not work

I tried to do some basic stuff like a gameloop and drawing with OpenGL by following a tutorial. In that tutorial SDL 1.2 is used but I am using 2.0 so I had to replace some old functions with the new versions.
Unfortunately the lines I want to draw don't show up.
I already tried playing with the numbers so that the line is not outside the window but it did not work.
#include "SDL.h"
#include "SDL_opengl.h"
#include <iostream>
int main(int argc, char* args[])
{
//initialize SDL
SDL_Init(SDL_INIT_EVERYTHING);
//Set openGL memory usage
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_Window *window;
window=SDL_CreateWindow("MyWindow",
SDL_WINDOWPOS_CENTERED,
SDL_WINDOWPOS_CENTERED,
600,400,
SDL_WINDOW_OPENGL);
SDL_Renderer *renderer;
renderer = SDL_CreateRenderer(window,-1,SDL_RENDERER_ACCELERATED);
SDL_SetRenderDrawColor(renderer,255,255,255,255);//RGB/ALPHA
glShadeModel(GL_SMOOTH);
//2D rendering
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
//Disable depth checking
glDisable(GL_DEPTH_TEST);
std::cout<<"OpenGL is running\n";
std::cout<<"Main loop has started\n";
//Handle GameLoop
bool isRunning=true;
//Handel events
SDL_Event event;
//main gameloop
while(isRunning)
{
//Events
while(SDL_PollEvent(&event))
{
if(event.type==SDL_QUIT)
isRunning=false;
//if esc button is released
if(event.type==SDL_KEYUP&&event.key.keysym.sym==SDLK_ESCAPE)
isRunning=false;
if(event.type==SDL_KEYUP&&event.key.keysym.sym==SDLK_r)
SDL_SetRenderDrawColor(renderer,255,0,0,255);
//Logic for certain events
}
//Logic
//Rendering
SDL_RenderClear(renderer);
SDL_RenderPresent(renderer);
glPushMatrix();
glOrtho(0,600,400,0,-1,1); //Set matrix
glBegin(GL_LINES);//start drawing
glColor4ub(255,0,0,255);
glVertex2i(0,0);
glVertex2i(600,400);
glEnd();// end drawing
glPopMatrix();
SDL_GL_SwapWindow(window);
}
SDL_Quit();
return 0;
}
Don't mix SDL_Renderer code and OpenGL. There's no provision (yet, maybe ever) in SDL2 for resetting Renderer GL state (if it's using the GL backend) that you trample with raw GL code.
First, don't mix OpenGL and the SDL_Renderer stuff. Then you actually need to create an OpenGL context for your window before rendering can work. Use this after your SDL_CreateWindow call:
SDL_GLContext glcontext = SDL_GL_CreateContext(window);
At the end you need to free the context:
SDL_GL_DeleteContext(glcontext);

Rendering the Kinect output with GLUT in Visual Studio 2013 C++ (Stack overflow)

I have been trying to port some useful code for a project, but I'm experiencing some difficulty in making GLUT work properly. Whenever the cbRender function is called, Visual Studio pops up an exception saying that there was a Stack Overflow: "Unhandled exception at 0x00FAD357 in KinectBridgeWithOpenCVBasics-D2D.exe: 0xC00000FD: Stack overflow (parameters: 0x00000000, 0x00272000)."
I have tried putting a breakpoint right at the declaration of cbRender, and by going into the next step has got me in a "chkstk.asm not found" VB page.
Here's my main function so far:
// Program entry point: brings up the Kinect, creates the GLUT and OpenCV
// windows, registers the UI callbacks, and hands control to GLUT.
int main(int argc, char** argv) {
// Initialise the Kinect runtime for depth+player-index and colour streams.
HRESULT hr = NuiInitialize(
NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX
| NUI_INITIALIZE_FLAG_USES_COLOR);
// NOTE(review): comparing against S_OK treats other success HRESULTs as
// failure; the conventional check is FAILED(hr) -- confirm NuiInitialize's
// contract before changing.
if (hr != S_OK)
{
cout << "NuiInitialize failed" << endl;
return hr;
}
// Initialize Display Mode
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_ALPHA | GLUT_DEPTH);
// Initialize OpenGL Window
glutInitWindowSize(window_width, window_height);
glutInitWindowPosition(window_xpos, window_ypos);
GLwindow = glutCreateWindow("Kinect Registration");
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
// Initialize OpenCV Window
namedWindow("Camera 0 | Camera 1", CV_WINDOW_AUTOSIZE);
// Setup The GL Callbacks
glutDisplayFunc(cbRender);
glutReshapeFunc(cbReSizeGLScene);
glutKeyboardFunc(cbKeyPressed);
glutMotionFunc(cbMouseMoved);
glutMouseFunc(cbMousePress);
glutTimerFunc(10, cbTimer, 10);
// Setup The CV Callbacks
cvSetMouseCallback("Camera 0 | Camera 1", cbMouseEvent);
// NOTE(review): classic GLUT's main loop never returns, so the two lines
// below are unreachable in practice -- consider atexit(NuiShutdown) or
// freeglut's glutMainLoopEvent for a clean shutdown.
glutMainLoop();
NuiShutdown();
return 0;
}
And the cbRender function:
// GLUT display callback: pulls the current Kinect frame into vertex/colour
// buffers and renders it as a coloured point cloud from camera 0.
void cbRender() {
    // BUG FIX (0xC00000FD stack overflow): these three buffers total
    // roughly window_height * window_width * (3*2 + 3*1 + 4) bytes --
    // several megabytes -- which blows the default 1 MB thread stack the
    // moment the function is entered (hence the crash in chkstk, MSVC's
    // stack-probe routine).  `static` moves them to static storage; safe
    // here because classic GLUT invokes callbacks from a single thread.
    static short xyz[window_height][window_width][3];
    static unsigned char rgb[window_height][window_width][3];
    static unsigned int indices[window_height][window_width];
    // Flush the OpenCV Mat's from last frame
    rgbCV.clear();
    depthCV.clear();
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);
    // Camera transform: zoom, orbit position, user rotation.
    glPushMatrix();
    glScalef(zoom, zoom, 1);
    gluLookAt(0, 0, 3.5, 0, 0, 0, 0, 1.0, 0);
    glRotatef(rotangles[0], 1, 0, 0);
    glRotatef(rotangles[1], 0, 1, 0);
    draw_axes();
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_COLOR_ARRAY);
    glPointSize(2);
    //--------Camera 0 (P)-----------
    loadBuffers(0, indices, xyz, rgb);
    glVertexPointer(3, GL_SHORT, 0, xyz);
    glColorPointer(3, GL_UNSIGNED_BYTE, 0, rgb);
    glPushMatrix();
    // transform centroid of P to origin and rotate
    transformation(0);
    // projection matrix (camera specific - Can be improved)
    loadVertexMatrix();
    glDrawArrays(GL_POINTS, 0, window_width * window_height);
    glPopMatrix();
    // FIX: balance the camera glPushMatrix above -- the original pushed
    // twice but popped once, leaking one modelview stack level per frame.
    glPopMatrix();
    glFlush();
    glutSwapBuffers();
    glDisable(GL_DEPTH_TEST);
}
I realize that most of the actual code is hidden away in functions, and if any of those functions can be causing the problem just say so and I will provide the code for them, too.
I don't immediately see a problem with your code; however, I can give some advice: there are two possible reasons for a stack overflow:
the amount of available stack is exceeded by allocating an object in automatic storage.
a (set of) functions calling themselves recursively without properly terminating the recursion.
Luckily stack overflows are very easy to debug: Run your program in a debugger, let the stack overflow happen and retrieve a backtrace / stack trace using the debugger. This will exactly show you which function call(s) are causing the problem.

sdl osx eclipse c++ display error

I've got a problem in running SDL app in eclipse under osx.
#include <SDL/SDL_opengl.h>
#include <SDL/SDL.h>
#include <SDL_ttf/SDL_ttf.h>
#include <SDL_image/SDL_image.h>
#include <iostream.h>
int main(int argc, char* argv[]){
int error;
error = SDL_Init(SDL_INIT_EVERYTHING);
std::cout << "error " << error << std::endl;
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_ALPHA_SIZE, 8);
Uint32 flags;
flags = SDL_OPENGL | SDL_HWSURFACE | SDL_HWPALETTE| SDL_DOUBLEBUF ;
drawContext = SDL_SetVideoMode(1024, 768, 16, flags);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0f, 1024, 768, 0.0f, 0.0f, 1000.0f);
glMatrixMode(GL_MODELVIEW);
while(true){
glFinish();
SDL_GL_SwapBuffers();
}
}
This gets weird output.
This only happens using the
SDL_OPENGL
flag.
Any ideas?
You never bother to clear the default framebuffer to a known value and the GL implementation isn't required to give you a cleared buffer.
OpenGL 4.3 spec, page 254, paragraph 1 (emphasis mine):
The default framebuffer is initially used as the draw and read framebuffer,
and the initial state of all provided bitplanes is undefined. The format and encoding of buffers in the draw and read framebuffers can be queried as described in section 9.2.3.
Clear the framebuffer sometime before you swap:
// Clear to a defined colour every frame: the default framebuffer's initial
// contents are undefined, so never swap without clearing/drawing first.
while(true)
{
glClear( GL_COLOR_BUFFER_BIT );
// draw stuff
SDL_GL_SwapBuffers();
}