SDL not displaying shape - C++

Within my project, I've been having trouble getting a triangle to display within OnRender(); for some reason, nothing other than the background color (green) is visible.
int main(int argc, char **argv)
{
    if (!OnInit())
        return -1;
    SDL_Event Event;
    while (_isRunning)
    {
        while (SDL_PollEvent(&Event))
            OnEvent(&Event);
        OnRender();
        OnLoop();
        SDL_GL_SwapWindow(_screen);
    }
    OnCleanup();
    return 0;
}
void generalSetup()
{
    // Initialize SDL2
    if (SDL_Init(SDL_INIT_VIDEO) < 0)
        sdldie("Failed to initialize SDL2.");
    else
    {
        /* Request OpenGL 3.2 */
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
        // Create window
        _screen = SDL_CreateWindow("Window", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                                   800, 600, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
        // Create Context
        _mainContext = SDL_GL_CreateContext(_screen);
        // Create Surface
        _surface = SDL_GetWindowSurface(_screen);
        SDL_FillRect(_surface, NULL, SDL_MapRGB(_surface->format, 0xCC, 0x20, 0x20));
        SDL_UpdateWindowSurface(_screen);
        /* swap synchronized */
        SDL_GL_SetSwapInterval(1);
        // Initialize GLEW 1.10
        glewExperimental = GL_TRUE;
        GLenum error = glewInit();
        if (error != GLEW_OK)
            printf("Warning: Unable to initialize GLEW! Error: %s\n", glewGetErrorString(error));
        else
            std::cout << "GLEW initialized" << std::endl;
        glClearColor(0, 1, 0, 0);
        glViewport(0, 0, 800, 600);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        gluPerspective(45, 800 / 600, 1, 1000);
        gluLookAt(0, 0, 20, 0, 0, 0, 0, 1, 0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
    }
}
bool OnInit()
{
    generalSetup();
    return true;
}
void OnEvent(SDL_Event* Event)
{
    if (Event->type == SDL_QUIT)
        _isRunning = false;
}
void OnLoop()
{
}
void OnRender()
{
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glPushMatrix();
    glTranslatef(0.f, 0.f, -10.f);
    glBegin(GL_TRIANGLES);
    glColor3f(0.1, 0.2, 0.3);
    glVertex3f(0, 0, 0);
    glVertex3f(1, 0, 0);
    glVertex3f(0, 1, 0);
    glEnd();
    glPopMatrix();
}
void OnCleanup()
{
    SDL_GL_DeleteContext(_mainContext);
    SDL_DestroyWindow(_screen);
    SDL_Quit();
}

You requested a Core context. None of your immediate-mode (matrix stack, glBegin(), etc.) code will work.
Drop back to a compatibility context (SDL_GL_CONTEXT_PROFILE_COMPATIBILITY) or supply all the necessary shaders, vertex buffers, etc. that Core requires.
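For reference, here is a minimal sketch (my addition, not the asker's or answerer's code) of what the second route involves: a GLSL 1.50 shader pair plus a VAO/VBO, assuming GLEW is initialized and the 3.2 core context is current. Compile/link error checking is omitted for brevity.
const char* vsSrc =
    "#version 150 core\n"
    "in vec3 position;\n"
    "void main() { gl_Position = vec4(position, 1.0); }\n";
const char* fsSrc =
    "#version 150 core\n"
    "out vec4 fragColor;\n"
    "void main() { fragColor = vec4(0.1, 0.2, 0.3, 1.0); }\n";
// One-time setup: compile the shaders and link the program.
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vsSrc, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fsSrc, NULL);
glCompileShader(fs);
GLuint prog = glCreateProgram();
glAttachShader(prog, vs);
glAttachShader(prog, fs);
glLinkProgram(prog);
// One-time setup: upload the triangle. The vertices are given directly in clip
// space, because the core profile has no fixed-function matrix stack.
const GLfloat verts[] = { 0.f, 0.f, 0.f,  1.f, 0.f, 0.f,  0.f, 1.f, 0.f };
GLuint vao, vbo;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
GLint posLoc = glGetAttribLocation(prog, "position");
glEnableVertexAttribArray(posLoc);
glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
// Per frame, e.g. in OnRender():
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(prog);
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
Any projection or model-view transforms then go into vertex-shader uniforms instead of gluPerspective()/gluLookAt().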

You are trying to render through immediate mode, but it is not supported by an OpenGL 3.2 core context. Try requesting version 2.0 or 2.1 instead, which supports both shaders (if you intend to use them) and immediate mode.
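If you go that route, a minimal sketch of the change (assuming the rest of generalSetup() stays as-is) would be:
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);
// or keep 3.2 but ask for the compatibility profile instead of core:
// SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_COMPATIBILITY);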

Related

GLFW larger window than screen resolution

When rendering off-screen content with OpenGL, I am trying to use a window larger than the current screen resolution. The following code fails to render correctly in that case (it renders to only a portion of the window), but works OK when the window size is <= the screen resolution. (Error checks and such removed.)
void run(int wi2,int he2)
{
glfwInit();
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
win = glfwCreateWindow(wi2, he2, "OpenGL", 0, 0);
glfwMakeContextCurrent(win);
hOpenGL = glfwGetWin32Window(win);
ShowWindow(hOpenGL, SW_HIDE);
if (!loadExtensions())
{
....
}
glEnable(GL_DEPTH_TEST); // Use the Z buffer
glfwSwapInterval(0); // Do not wait for screen refresh between frames
glfwSetWindowSize(win, wi2,he2);
glViewport(0, 0, wi2,he2);
SetEvent(hRun2);
MSG msg;
for (;;)
{
HANDLE he[2] = { hRun1,hRunE };
auto gc = MsgWaitForMultipleObjects(2, he, 0, INFINITE, QS_ALLEVENTS);
if (gc == WAIT_OBJECT_0 + 1)
{
// end
glfwDestroyWindow(win);
win = 0;
break;
}
else
if (gc == WAIT_OBJECT_0)
{
// render
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(runc.bg.r, runc.bg.g, runc.bg.b, runc.bg.a);
// Load uniforms, others are omitted for simplicity
float wh[4] = { wi2,he2,0,0 };
auto uloc =
glGetUniformLocation(runc.prg, "iResolution");
glUniform3fv(uloc, 1, wh);
glLoadIdentity();
glBegin(GL_POLYGON);
glColor3f(1.0, 1.0, 1.0);
glVertex2i(-wi2 / 2, -he2 / 2);
glVertex2i(wi2 / 2, -he2 / 2);
glVertex2i(wi2 / 2, he2 / 2);
glVertex2i(-wi2 / 2, he2 / 2);
glEnd();
auto r = runc.glOut->GetRawData();
memset(r, 0, wi2 * he2 * 4);
glReadPixels(0, 0, wi2, he2, GL_BGRA, GL_UNSIGNED_BYTE, r);
glfwSwapBuffers(win);
SetEvent(hRun2);
}
else { ... }
}
glfwTerminate();
win = 0;
hOpenGL = 0;
SetEvent(hRun2);
}
Is there a way to have a window rendering larger than the current screen size? Even in software rendering.
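A side note (my sketch, not from the original thread): for purely off-screen rendering at sizes independent of the screen or window, the usual tool is a framebuffer object, with the window only hosting the GL context. A minimal sketch, assuming FBO functions are available after loadExtensions() and reusing wi2, he2 and the r pixel buffer from the code above:
GLuint fbo, colorTex, depthRb;
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
// Color attachment: a texture of the requested off-screen size.
glGenTextures(1, &colorTex);
glBindTexture(GL_TEXTURE_2D, colorTex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, wi2, he2, 0, GL_BGRA, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, colorTex, 0);
// Depth attachment.
glGenRenderbuffers(1, &depthRb);
glBindRenderbuffer(GL_RENDERBUFFER, depthRb);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT24, wi2, he2);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRb);
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE) {
    glViewport(0, 0, wi2, he2);
    // ... draw as before; glReadPixels now reads from the FBO ...
    glReadPixels(0, 0, wi2, he2, GL_BGRA, GL_UNSIGNED_BYTE, r);
}
glBindFramebuffer(GL_FRAMEBUFFER, 0); // back to the default (window) framebuffer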

Why doesn't the text render with nvgText() in my implementation, but works fine in the example?

This example follows the NanoVG examples.
DemoData data;
NVGcontext* vg = NULL;
GPUtimer gpuTimer;
PerfGraph fps, cpuGraph, gpuGraph;
double prevt = 0, cpuTime = 0;
if (!glfwInit()) {
printf("Failed to init GLFW.");
return -1;
}
initGraph(&fps, GRAPH_RENDER_FPS, "Frame Time");
initGraph(&cpuGraph, GRAPH_RENDER_MS, "CPU Time");
initGraph(&gpuGraph, GRAPH_RENDER_MS, "GPU Time");
glfwSetErrorCallback(errorcb);
#ifndef _WIN32 // don't require this on win32, and works with more cards
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
#endif
glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, 1);
#ifdef DEMO_MSAA
glfwWindowHint(GLFW_SAMPLES, 4);
#endif
//window = glfwCreateWindow(1000, 600, "NanoVG", NULL, NULL);
window = glfwCreateWindow(1000, 600, "NanoVG", glfwGetPrimaryMonitor(), NULL);
if (!window) {
glfwTerminate();
return -1;
}
glfwSetKeyCallback(window, key);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
printf("Could not init glew.\n");
return -1;
}
// GLEW generates GL error because it calls glGetString(GL_EXTENSIONS), we'll consume it here.
glGetError();
//vg = nvgCreateGL3(NVG_STENCIL_STROKES | NVG_DEBUG);
vg = nvgCreateGL3(NVG_ANTIALIAS | NVG_STENCIL_STROKES | NVG_DEBUG);
if (vg == NULL) {
printf("Could not init nanovg.\n");
return -1;
}
if (loadDemoData(vg, &data) == -1)
return -1;
glfwSwapInterval(0);
initGPUTimer(&gpuTimer);
glfwSetTime(0);
prevt = glfwGetTime();
while (!glfwWindowShouldClose(window))
{
double mx, my, t, dt;
int winWidth, winHeight;
int fbWidth, fbHeight;
float pxRatio;
float gpuTimes[3];
int i, n;
t = glfwGetTime();
dt = t - prevt;
prevt = t;
startGPUTimer(&gpuTimer);
glfwGetCursorPos(window, &mx, &my);
glfwGetWindowSize(window, &winWidth, &winHeight);
glfwGetFramebufferSize(window, &fbWidth, &fbHeight);
// Calculate pixel ratio for hi-dpi devices.
pxRatio = (float)fbWidth / (float)winWidth;
// Update and render
glViewport(0, 0, fbWidth, fbHeight);
if (premult)
glClearColor(0, 0, 0, 0);
else
glClearColor(0.3f, 0.3f, 0.32f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
nvgBeginFrame(vg, winWidth, winHeight, pxRatio);
renderDemo(vg, mx, my, winWidth, winHeight, t, blowup, &data);
//works here
//char sample[] = "Sample";
//nvgBeginPath(vg);
//nvgFontSize(vg, 18.0f);
//nvgFontFace(vg, "sans");
//nvgTextAlign(vg, NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
//nvgText(vg, 200, 200, sample, NULL);
//nvgFill(vg);
renderGraph(vg, 5, 5, &fps);
renderGraph(vg, 5 + 200 + 5, 5, &cpuGraph);
if (gpuTimer.supported)
renderGraph(vg, 5 + 200 + 5 + 200 + 5, 5, &gpuGraph);
nvgEndFrame(vg);
// Measure the CPU time taken excluding swap buffers (as the swap may wait for GPU)
cpuTime = glfwGetTime() - t;
updateGraph(&fps, dt);
updateGraph(&cpuGraph, cpuTime);
// We may get multiple results.
n = stopGPUTimer(&gpuTimer, gpuTimes, 3);
for (i = 0; i < n; i++)
updateGraph(&gpuGraph, gpuTimes[i]);
if (screenshot) {
screenshot = 0;
saveScreenShot(fbWidth, fbHeight, premult, "dump.png");
}
glfwSwapBuffers(window);
glfwPollEvents();
}
freeDemoData(vg, &data);
nvgDeleteGL3(vg);
printf("Average Frame Time: %.2f ms\n", getGraphAverage(&fps) * 1000.0f);
printf(" CPU Time: %.2f ms\n", getGraphAverage(&cpuGraph) * 1000.0f);
printf(" GPU Time: %.2f ms\n", getGraphAverage(&gpuGraph) * 1000.0f);
glfwTerminate();
return 0;
This segment works in the implementation above.
nvgBeginPath(vg);
nvgFontSize(vg, 18.0f);
nvgFontFace(vg, "sans");
nvgTextAlign(vg, NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
nvgText(vg, 200, 200, sample, NULL);
nvgFill(vg);
It doesn't render in the implementation below.
GLFWwindow* window;
NVGcontext* vg = NULL;
//initializing GLFW
if (!glfwInit()) {
printf("Failed to init GLFW.");
return -1;
}
glfwSetErrorCallback(errorcb);
//creating a GL-Window
window = glfwCreateWindow(1366, 768, "NanoVG", NULL, NULL);
if (!window) {
glfwTerminate();
return -1;
}
//glfwGetCursorPos(window, &xpos, &ypos);
glfwSetKeyCallback(window, key);
glfwSetCursorPosCallback(window, cursor_position_callback);
glfwSetMouseButtonCallback(window, mouse_button_callback);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
printf("Could not init glew.\n");
return -1;
}
glGetError();
//initialize nanovg
vg = nvgCreateGL3(NVG_ANTIALIAS | NVG_STENCIL_STROKES | NVG_DEBUG);
if (vg == NULL) {
printf("Could not init nanovg.\n");
return -1;
}
glfwSwapInterval(0);
defaultColor = nvgRGBA(255, 255, 255, 255);
defaultWidth = 1.0f;
/*glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, 1366, 768, 0.0f, 0.0f, 1000.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(-1366, -768, 0.0f);
glScalef(1.0, -1.0, 1.0f);*/
while (!glfwWindowShouldClose(window))
{
int winWidth, winHeight, pxRatio;
int fbWidth, fbHeight;
glfwGetCursorPos(window, &mx, &my);
glfwGetWindowSize(window, &winWidth, &winHeight);
glfwGetFramebufferSize(window, &fbWidth, &fbHeight);
pxRatio = (float)fbWidth / (float)winWidth;
glViewport(0, 0, winWidth, winHeight);
glClearColor(0.3f, 0.3f, 0.32f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
nvgBeginFrame(vg, winWidth, winHeight, pxRatio);
// doesn't work here
char sample[] = "Sample";
nvgBeginPath(vg);
nvgFontSize(vg, 18.0f);
nvgFontFace(vg, "sans");
nvgTextAlign(vg, NVG_ALIGN_LEFT | NVG_ALIGN_MIDDLE);
nvgText(vg, 500, 500, sample, NULL);
nvgFill(vg);
nvgEndFrame(vg);
glfwSwapBuffers(window);
glfwPollEvents();
iteration_Number++;
}
nvgDeleteGL3(vg);
glfwTerminate();
return 0;
Am I missing something basic? Sorry, I am a beginner at NanoVG and OpenGL. I have the two snippets above in two different functions and execute only one at a time.
Two problems:
1. I had to initialise the font from disk before I could use it in a rendering context:
nvgCreateFont(vg, "sans", ".\\example\\Roboto-Regular.ttf");
Including this line made it work fine.
2. I hadn't read the NanoVG documentation well and had only partially understood it.
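For completeness (my note, not part of the original answer): nvgCreateFont() returns -1 if the font file cannot be loaded, so it is worth creating the font right after nvgCreateGL3() and checking the result, e.g.:
int font = nvgCreateFont(vg, "sans", ".\\example\\Roboto-Regular.ttf");
if (font == -1) {
    printf("Could not load font.\n");
    return -1;
}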

Missing faces when displaying STL data

I wrote a simple parser for the ASCII STL format. When I try to render the triangles with the supplied normals, the resulting object is missing many faces:
This is how it should look:
What I already tried:
- explicitly disabled backface culling (though it shouldn't have been active before)
- ensured that the depth buffer is enabled
Here is a minimal sample program which reproduces the error:
#include <SDL2/SDL.h>
#include <SDL2/SDL_main.h>
#include <SDL2/SDL_render.h>
#include <SDL2/SDL_opengl.h>
int main(int argc, char **argv) {
SDL_Init(SDL_INIT_VIDEO);
int screen_w=1280,screen_h=720;
SDL_Window * win = SDL_CreateWindow("test", 20, 20, screen_w, screen_h,
SDL_WINDOW_OPENGL);
SDL_GLContext glcontext = SDL_GL_CreateContext(win);
STLParser stlparser;
std::ifstream file(".\\logo.stl");
stlparser.parseAscii(file);
const auto& ndata = stlparser.getNData();
const auto& vdata = stlparser.getVData();
std::cout << "number of facets: " << ndata.size() << std::endl;
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
glMatrixMode(GL_PROJECTION | GL_MODELVIEW);
glLoadIdentity();
glScalef(1.f, -1.f, 1.f);
glOrtho(0, screen_w, 0, screen_h, -screen_w, screen_w);
glClearDepth(1.0f);
glDepthFunc(GL_LEQUAL);
glEnable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_VERTEX_ARRAY);
glNormalPointer(GL_FLOAT, 0, ndata.data());
glVertexPointer(3, GL_FLOAT, 0, vdata.data());
SDL_Event event;
bool quit = false;
while (!quit) {
while (SDL_PollEvent(&event))
switch(event.type) {
case SDL_QUIT: quit = true; break;
}
;
// Drawing
glClearColor(255,255,255,255);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glTranslatef(screen_w/2,0,0);
glRotatef(0.5,0,1,0);
glTranslatef(-screen_w/2,0,0);
glPushMatrix();
glTranslatef(screen_w/2,screen_h/2,0);
glColor3f(0.5,0.5,0);
glDrawArrays(GL_TRIANGLES, 0, vdata.size());
glPopMatrix();
SDL_GL_SwapWindow(win);
SDL_Delay(10);
}
SDL_DestroyWindow(win);
SDL_Quit();
return 0;
}
The STLParser methods getNData() and getVData() have the following signatures:
const std::vector<std::array<float,3>>& getNData() const;
const std::vector<std::array<std::array<float,3>,3>>& getVData() const;
STLParser output should be correct, but I can provide the sources as well if needed.
What am I doing wrong?
You should change
glDrawArrays(GL_TRIANGLES, 0, vdata.size());
to
glDrawArrays(GL_TRIANGLES, 0, 3 * vdata.size());
i.e. the count must be the vertex count, not the triangle count. Each element of vdata is one triangle holding three vertices, so passing vdata.size() only drew a third of the mesh.

Trouble having SDL_TTF and OpenGL work together

For a game I'm working on, I'm hoping to use OpenGL for a lot of the graphics and SDL_TTF for the text. I can get both to work, but not at the same time. Here's my code (based on Lazy Foo's tutorials):
#include "SDL/SDL.h"
#include "SDL/SDL_ttf.h"
#include "GL/gl.h"
const bool useOpenGl = true;
//The surfaces
SDL_Surface *message = NULL;
SDL_Surface *screen = NULL;
//The event structure
SDL_Event event;
//The font that's going to be used
TTF_Font *font = NULL;
//The color of the font
SDL_Color textColor = {255, 255, 255};
void apply_surface(int x, int y, SDL_Surface* source, SDL_Surface* destination, SDL_Rect* clip = NULL)
{
//Holds offsets
SDL_Rect offset;
//Get offsets
offset.x = x;
offset.y = y;
//Blit
SDL_BlitSurface(source, clip, destination, &offset);
}
bool init()
{
SDL_Init (SDL_INIT_EVERYTHING);
if (useOpenGl)
{
screen = SDL_SetVideoMode (1280, 720, 32, SDL_SWSURFACE | SDL_OPENGL); // With the SDL_OPENGL flag only the OpenGL quad is seen; without it, only the text is
} else {
screen = SDL_SetVideoMode (1280, 720, 32, SDL_SWSURFACE);
}
TTF_Init();
SDL_WM_SetCaption ("TTF Not Working With OpenGL", NULL);
if (useOpenGl)
{
glClearColor(1.0, 0.0, 0.0, 0.0);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0.0, screen->w, screen->h, 1.0, -1.0, 1.0);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}
return true;
}
bool load_files()
{
font = TTF_OpenFont ("arial.ttf", 28);
return true;
}
void clean_up()
{
SDL_FreeSurface (message);
TTF_CloseFont (font);
TTF_Quit();
SDL_Quit();
}
int main(int argc, char* args[])
{
//Quit flag
bool quit = false;
init();
load_files();
if (useOpenGl)
{
glClear(GL_COLOR_BUFFER_BIT); //clearing the screen
glPushMatrix();
glBegin(GL_QUADS);
glColor3f(1.0, 0.0, 0.0);
glVertex2f(0, 0);
glColor3f(0.0, 1.0, 0.0);
glVertex2f(1280, 0);
glColor3f(0.0, 0.0, 1.0);
glVertex2f(1280, 720);
glColor4f(0.5, 0.5, 1.0, 0.1);
glVertex2f(0, 720);
glEnd();
glPopMatrix();
glFlush();
}
//Render the text
message = TTF_RenderText_Solid (font, "The quick brown fox jumps over the lazy dog", textColor);
//Apply the images to the screen
apply_surface (0, 150, message, screen);
//I'm guessing this is where the problem is coming from
SDL_GL_SwapBuffers();
SDL_Flip (screen);
while (quit == false)
{
while (SDL_PollEvent (&event))
{
if (event.type == SDL_QUIT)
{
quit = true;
}
}
}
clean_up();
return 0;
}
If the variable useOpenGl is set to false, the program only uses SDL_TTF; if it's set to true, it uses both SDL_TTF and OpenGL.
From playing around with it, the problem seems to come down to whether or not I use the SDL_OPENGL flag when creating the window.
SDL_TTF renders to software surfaces, and blitting those surfaces to the screen is not compatible with a window created in OpenGL mode.
You might have to look at another library such as FTGL or freetype-gl.
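One common workaround (my sketch, not part of the answer above) is to keep SDL_ttf for rasterizing the text but upload the resulting surface as an OpenGL texture and draw it as a textured quad. A minimal sketch, assuming the blended surface comes back as 32-bit RGBA (real code should check message->format and may need GL_BGRA):
message = TTF_RenderText_Blended(font, "The quick brown fox jumps over the lazy dog", textColor);
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, message->w, message->h, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, message->pixels);
// ...then draw a quad at the desired position with glEnable(GL_TEXTURE_2D),
// glBegin(GL_QUADS) / glTexCoord2f() / glVertex2f() / glEnd(), and call
// SDL_GL_SwapBuffers() only (no SDL_Flip in OpenGL mode).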

SDL OpenGL issue, no OpenGL drawing seen

I am using SDL 1.2.15 under the latest Cygwin.
Here is my code using SDL and OpenGL:
#include <SDL/SDL.h>
#include <SDL/SDL_opengl.h>
#include <iostream>
size_t sx=600, sy=600, bpp=32;
void render(void) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity(); // set location in front of camera
//glTranslated(0, 0, -10);
glBegin(GL_QUADS); // draw a square
glColor3d(1, 0, 0);
glVertex3d(-2, 2, 0);
glVertex3d( 2, 2, 0);
glVertex3d( 2, -2, 0);
glVertex3d(-2, -2, 0);
glEnd();
glFlush();
SDL_GL_SwapBuffers();
GLenum e;
while ((e =glGetError()) != GL_NO_ERROR)
std::cout<<"Error "<< e << std::endl;
}
int input(void) {
SDL_Event event;
while (SDL_PollEvent(&event))
if (event.type == SDL_QUIT || (event.type == SDL_KEYUP && event.key.keysym.sym == SDLK_ESCAPE)) return 0;
return 1;
}
and this is my main function
int main(int argc, char *argv[]) {
SDL_Surface *surf;
if (SDL_Init(SDL_INIT_EVERYTHING) != 0) return 0;
if (!(surf = SDL_SetVideoMode(sx, sy, bpp, SDL_OPENGL))) return 0;
glViewport(0, 0, sx, sy);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(45.0, (float)sx / (float)sy, 1.0, 100.0);
glMatrixMode(GL_MODELVIEW);
glClearColor(0, 0, 0, 1);
glClearDepth(1.0);
glEnable(GL_DEPTH_TEST);
GLenum e;
while ((e =glGetError()) != GL_NO_ERROR)
std::cout<<"Error "<< e << std::endl;
for (;;) {
if (!input()) break;
render();
SDL_Delay(10);
}
SDL_FreeSurface(surf);
SDL_Quit();
return 0;
}
It compiles with no errors, but when I run it, only the window shows up and no OpenGL rectangle.
You have set the near plane to 1:
gluPerspective(45.0, (float)sx / (float)sy, 1.0 /* near plane */, 100.0);
Everything closer to the camera than that is clipped.
Your quad lies in the plane z = 0. Try moving it back a bit.
glBegin(GL_QUADS); // draw a square
glColor3d(1, 0, 0);
glVertex3d(-2, 2, 5);
glVertex3d( 2, 2, 5);
glVertex3d( 2, -2, 5);
glVertex3d(-2, -2, 5);
glEnd();
I don't remember whether +Z points toward the camera, so you might need a negative Z value.
You also need to pay attention to face culling; it might be safer to disable it while debugging (glDisable(GL_CULL_FACE)).
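In that spirit, a quick sketch of the adjusted render() body (my addition), assuming the default setup where the camera looks down -Z, so the quad has to sit at a negative Z between the near and far planes; this just uncomments and uses the glTranslated() call already in the question:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
glTranslated(0, 0, -10);   // push the quad 10 units in front of the camera
glBegin(GL_QUADS);
glColor3d(1, 0, 0);
glVertex3d(-2,  2, 0);
glVertex3d( 2,  2, 0);
glVertex3d( 2, -2, 0);
glVertex3d(-2, -2, 0);
glEnd();
SDL_GL_SwapBuffers();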
Try changing the black clear color of the SDL window. Sometimes it renders the drawing in black... maybe this helps!