Does SFML support transparency? (OpenGL)

Hi, I am trying to draw a translucent sphere using OpenGL/GLUT.
I think the easiest thing is to post the code; this is what I have at the moment:
glutInit(&argc, argv);

sf::ContextSettings settings;
settings.depthBits = 32;
settings.stencilBits = 0;
settings.antialiasingLevel = 0;

sf::Window window(sf::VideoMode(800, 600), "insert title", sf::Style::Default, settings);
window.setVerticalSyncEnabled(true);

glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glClearColor(0.0, 0.0, 0.0, 0.0);

bool running = true;
while (running)
{
    sf::Event e;
    while (window.pollEvent(e))
    {
        if (e.type == sf::Event::Closed)
        {
            running = false;
        }
        if (e.type == sf::Event::Resized)
        {
            glViewport(0, 0, e.size.width, e.size.height);
            glMatrixMode(GL_PROJECTION);
            glLoadIdentity();
            gluPerspective(45.0, (double)e.size.width / (double)e.size.height, 0.1, 2000.0);
            gluLookAt(0, 0, 0, 1, 0, 0, 0, 1, 0);
            glMatrixMode(GL_MODELVIEW);
        }
    }

    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glColor4d(1.0, 0.0, 0.0, 0.5);
    glutWireSphere(0.5, 4, 4);
    window.display();
}
I ran this both with and without optirun, but there is no transparency either way.
I read somewhere that the context must have an alpha buffer. Does SFML support this?

Your code is very confused.
FreeGLUT and SFML do the same thing with regard to OpenGL: they create windows with OpenGL contexts. You should not be using both of them in the same application.
Also, you can't call OpenGL functions until you've created a window, and those functions only affect whichever context is current at the time; they have no effect on a context you create later with SFML. Your code simply doesn't make sense.
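As for the alpha-buffer question: blending with GL_SRC_ALPHA / GL_ONE_MINUS_SRC_ALPHA uses only the incoming fragment's alpha, so you do not need alpha bitplanes in the framebuffer for this to work. Below is a minimal sketch of an SFML-only version of your program; glutWireSphere is swapped for a GLU quadric (GLU only needs a current context, no GLUT), and the camera is moved back so the sphere at the origin is visible:

#include <SFML/Window.hpp>
#include <GL/glu.h>

int main()
{
    sf::ContextSettings settings;
    settings.depthBits = 24;   // 24 is a more widely supported request than 32

    sf::Window window(sf::VideoMode(800, 600), "insert title", sf::Style::Default, settings);
    window.setVerticalSyncEnabled(true);

    // These calls affect the context SFML just made current.
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glClearColor(0.f, 0.f, 0.f, 0.f);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(45.0, 800.0 / 600.0, 0.1, 2000.0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    gluLookAt(0, 0, 2, 0, 0, 0, 0, 1, 0);

    GLUquadric* quad = gluNewQuadric();
    gluQuadricDrawStyle(quad, GLU_LINE);   // wireframe, like glutWireSphere

    bool running = true;
    while (running)
    {
        sf::Event e;
        while (window.pollEvent(e))
            if (e.type == sf::Event::Closed)
                running = false;

        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glColor4d(1.0, 0.0, 0.0, 0.5);   // 50% alpha blends against whatever is behind
        gluSphere(quad, 0.5, 16, 16);
        window.display();
    }

    gluDeleteQuadric(quad);
    return 0;
}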

Related

When I use OpenGL with GLUT, why does it show nothing when I close a window and reopen it?

I use freeglut to create windows, and the code goes like this:
int window1, window2;

GLfloat cube[] = {
    // cube points
};

void init2()
{
    // init shaders...
    // init vertex arrays with cube points...
}

void display2()
{
    glutSetWindow(window2);
    glViewport(0, 0, WIDTH, HEIGHT);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
    glEnable(GL_DEPTH_TEST);

    mul.use();
    glm::mat4 view = camera.GetViewMatrix();
    glm::mat4 projection = glm::perspective(camera.Zoom, (GLfloat)WIDTH / (GLfloat)HEIGHT, 0.1f, 100.0f);
    mul.setUniform("view", view);
    mul.setUniform("projection", projection);

    glBindVertexArray(box_vao2);
    glDrawArrays(GL_QUADS, 0, 24);
    glBindVertexArray(0);
    glUseProgram(0);

    glutSwapBuffers();
}

void openwin2()
{
    window2 = glutCreateWindow("win2");
    init2();
    glutDisplayFunc(display2);
    glutReshapeFunc(reshape2);
    glutIdleFunc(idle2);
}

void mouse(int button, int state, int x, int y)
{
    // do something and...
    openwin2();
}

int main(int argc, char **argv)
{
    // glutInit...
    window1 = glutCreateWindow("window1");
    init();
    glutDisplayFunc(display);
    glutReshapeFunc(reshape);
    glutMouseFunc(mouse);
    glutIdleFunc(idle);
    glutMainLoop();
    // ...
}
So, when I click and open window2 the first time, it displays the cube I draw with the VAO, but if I close it and reopen it, the window shows nothing. Why?
Is it something about the VAO?
I was reusing the shader program created in the previous window's context. Each call to glutCreateWindow creates a fresh OpenGL context, and objects such as shader programs and VAOs created in the old context do not exist in the new one, so the second window shows nothing. Recreating those resources in init2() every time the window opens fixes it.
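A minimal sketch of that fix, reusing the question's names (mul, box_vao2, cube); compileShaders() is a hypothetical helper standing in for the elided shader setup, and attribute location 0 is assumed:

void init2()
{
    // glutCreateWindow("win2") has just made a fresh context current, so
    // everything created here belongs to *this* context.
    mul = compileShaders();          // recompile; don't reuse window1's program

    // VAOs are container objects and are never shared between contexts,
    // so the VAO must be rebuilt per window even if other objects are shared.
    glGenVertexArrays(1, &box_vao2);
    glBindVertexArray(box_vao2);

    GLuint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(cube), cube, GL_STATIC_DRAW);

    glEnableVertexAttribArray(0);    // assumes the position attribute is at location 0
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (void*)0);
    glBindVertexArray(0);
}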

SDL not displaying shape

Within my project, I've been having trouble getting a triangle to display from OnRender(); for some reason, nothing other than the background color (green) is visible.
int main(int argc, char **argv)
{
    if (!OnInit())
        return -1;

    SDL_Event Event;
    while (_isRunning)
    {
        while (SDL_PollEvent(&Event))
            OnEvent(&Event);

        OnRender();
        OnLoop();
        SDL_GL_SwapWindow(_screen);
    }

    OnCleanup();
    return 0;
}

void generalSetup()
{
    // Initialize SDL2
    if (SDL_Init(SDL_INIT_VIDEO) < 0)
        sdldie("Failed to initialize SDL2.");
    else
    {
        /* Request OpenGL 3.2 */
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);

        // Create window
        _screen = SDL_CreateWindow("Window", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                                   800, 600, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);

        // Create context
        _mainContext = SDL_GL_CreateContext(_screen);

        // Create surface
        _surface = SDL_GetWindowSurface(_screen);
        SDL_FillRect(_surface, NULL, SDL_MapRGB(_surface->format, 0xCC, 0x20, 0x20));
        SDL_UpdateWindowSurface(_screen);

        /* swap synchronized */
        SDL_GL_SetSwapInterval(1);

        // Initialize GLEW 1.10
        glewExperimental = GL_TRUE;
        GLenum error = glewInit();
        if (error != GLEW_OK)
            printf("Warning: Unable to set VSync! SDL Error: %s\n", SDL_GetError());
        else
            std::cout << "GLew Initialized" << std::endl;

        glClearColor(0, 1, 0, 0);
        glViewport(0, 0, 800, 600);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        gluPerspective(45, 800 / 600, 1, 1000);
        gluLookAt(0, 0, 20, 0, 0, 0, 0, 1, 0);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
    }
}

bool OnInit()
{
    generalSetup();
    return true;
}

void OnEvent(SDL_Event* Event)
{
    if (Event->type == SDL_QUIT)
        _isRunning = false;
}

void OnLoop()
{
}

void OnRender()
{
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glPushMatrix();
    glTranslatef(0.f, 0.f, -10.f);
    glBegin(GL_TRIANGLES);
    glColor3f(0.1, 0.2, 0.3);
    glVertex3f(0, 0, 0);
    glVertex3f(1, 0, 0);
    glVertex3f(0, 1, 0);
    glEnd();
    glPopMatrix();
}

void OnCleanup()
{
    SDL_GL_DeleteContext(_mainContext);
    SDL_DestroyWindow(_screen);
    SDL_Quit();
}
You requested a Core context. None of your immediate-mode (matrix stack, glBegin(), etc.) code will work.
Drop back to a compatibility context (SDL_GL_CONTEXT_PROFILE_COMPATIBILITY) or supply all the necessary shaders, vertex buffers, etc. that Core requires.
You are trying to render through immediate mode, but that is not supported by an OpenGL 3.2 core profile. Try requesting version 2.0 or 2.1, which support both shaders (if you intend to use them) and immediate mode.
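A minimal sketch of that suggestion, changing only the attribute requests (the rest of generalSetup() stays as in the question):

/* Request a context where immediate mode is legal. Note that some platforms
   (notably macOS) expose core profiles only for GL 3.2+, and legacy GL only
   as 2.1, so requesting 2.1 is the portable fallback. */
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);

/* Alternatively, keep 3.2 but ask for the compatibility profile
   (where the driver offers one): */
// SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
// SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
// SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_COMPATIBILITY);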

Changing resolution in OpenGL doesn't change the actual size of textures drawn

I have an OpenGL application using SDL, and I'm experiencing strange behavior when changing the window resolution. At 1400x900 the window looks normal. If I increase the resolution to 1600x900, I get black bars and the effective screen size stays the same. If I reduce the resolution to 1280x1024, I only get part of the image.
However, if I exit the application and restart it manually, everything is drawn at the correct size. The numbers here are just examples; the problem occurs regardless of the initial and final resolutions.
void App::Load()
{
    int music = 100, effects = 100;
    XMLDoc settings("res/settings.xml");
    if (settings.ready())
    {
        rapidxml::xml_node<char> *node = settings.Doc()->first_node("settings");
        if (NodeValid(node))
        {
            if (NodeValid("screen", node))
                gScreenSettings.Load(node->first_node("screen"));

            if (NodeValid("volume", node))
            {
                rapidxml::xml_node<char> *volnode = node->first_node("volume");
                LoadNum(music, "music", volnode);
                LoadNum(effects, "effects", volnode);
            }

            gFilePath.Load(node->first_node("paths"));
        }
    }

    //Start the sound subsystem
    pyrodactyl::music::gMusicManager.Init(music, effects);
}

bool App::Init(bool load)
{
    //Load all SDL subsystems and the TrueType font subsystem
    if (SDL_Init(SDL_INIT_TIMER | SDL_INIT_AUDIO | SDL_INIT_VIDEO | SDL_INIT_JOYSTICK) == -1 || TTF_Init() == -1)
    {
        fprintf(stderr, "Couldn't initialize SDL :(");
        return false;
    }

    if (load)
        Load();

    //Set the window caption
    SDL_WM_SetCaption("Unrest", "Unrest");

    //Set the window icon
    SDL_Surface *iconimage = SDL_LoadBMP(gFilePath.icon.c_str());
    Uint32 colorkey = SDL_MapRGB(iconimage->format, 255, 0, 255);
    SDL_SetColorKey(iconimage, SDL_SRCCOLORKEY, colorkey);
    SDL_WM_SetIcon(iconimage, NULL);

    //Initialize the music thread
    if (music == NULL)
        music = SDL_CreateThread(MusicThread, NULL);

    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 4);
    SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL, 1);

    //Set keyboard repeat rate
    SDL_EnableKeyRepeat(SDL_DEFAULT_REPEAT_DELAY / 4, SDL_DEFAULT_REPEAT_INTERVAL);

    //Store the default desktop values before starting our own screen
    gScreenSettings.desktop.w = SDL_GetVideoInfo()->current_w;
    gScreenSettings.desktop.h = SDL_GetVideoInfo()->current_h;

    //Set up the screen
    screen = SDL_SetVideoMode(gScreenSettings.cur.w, gScreenSettings.cur.h, gScreenSettings.bpp, gScreenSettings.videoflags);
    if (screen == NULL)
        return false;

    //Initialize GLEW
    GLenum status = glewInit();
    if (status != GLEW_NO_ERROR)
    {
        fprintf(stderr, "Failed to initialize GLEW %d: %s\n", status, glewGetErrorString(status));
        return false;
    }

    //Initialize and load input
    pyrodactyl::input::gInput.Init();
    pyrodactyl::input::gInput.Load(gFilePath.controls);

    //Enable 2d textures
    glEnable(GL_TEXTURE_2D);
    glEnable(GL_ARB_texture_non_power_of_two);

    //Enable transparency in textures, set the blend function
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glStencilOp(GL_KEEP, GL_REPLACE, GL_REPLACE);

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClearStencil(0);
    glClear(GL_COLOR_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(0.0f, gScreenSettings.cur.w, gScreenSettings.cur.h, 0.0f, -1.0f, 1.0f);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();

    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    //Disable the SDL stock cursor
    SDL_ShowCursor(SDL_DISABLE);

    //Enable Unicode for text input in text area
    SDL_EnableUNICODE(SDL_ENABLE);

    //Seed the random function
    srand(static_cast<unsigned int>(time(NULL)));

    //Apply vsync settings
    gScreenSettings.Vsync();

    return true;
}

void App::Reset(bool load)
{
    using namespace pyrodactyl::image;

    //Delete stuff
    gImageManager.Quit();
    pyrodactyl::text::gTextManager.Reset();
    gLoadScreen.Quit();

    //Reload stuff
    Init(load);
    gLoadScreen.Load();
    gImageManager.Init();
    gImageManager.tileset.Reload();
}
The Reset() function is called whenever I need to change resolution. Please help me solve this problem.
My guess is that you need a glViewport call in your reset path: the viewport is set for the initial drawable when the context is created, but it is not updated automatically when you switch to a new resolution, so GL keeps rendering into the old rectangle.
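A minimal sketch of that guess: in Init(), right after SDL_SetVideoMode() succeeds, reset the viewport to the new size (Init() already rebuilds the projection with glOrtho, but never touches the viewport):

screen = SDL_SetVideoMode(gScreenSettings.cur.w, gScreenSettings.cur.h,
                          gScreenSettings.bpp, gScreenSettings.videoflags);
if (screen == NULL)
    return false;

// The viewport is not updated automatically when the drawable changes size;
// without this, GL keeps rendering into the rectangle of the old resolution.
glViewport(0, 0, gScreenSettings.cur.w, gScreenSettings.cur.h);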

Corrupt-looking Accumulation Buffer output

I am in the process of building a simple 3D game engine that is built on top of OpenGL, and for windowing and I/O, GLUT. I have run into a problem with the OpenGL accumulation buffer when trying to build a motion-blur option into the engine. Essentially, here is the small block of code that is supposed to do this for me:
glAccum(GL_MULT, 0.99f);
glAccum(GL_ACCUM, 1.0f - 0.99f);
glAccum(GL_RETURN, 1.0f);
I first tried this block of code by planting it in my Render() method, but it produced a corrupt-looking view in which only a few scattered pixels were visible. I then tried it with the rest of the source from the website where I found the code, and got the same issue.
When I take out the accumulation buffer portion (the three glAccum() lines that are supposed to achieve the motion blur), the scene renders normally.
Of course there is no motion blur then, since I removed the glAccum() lines, but it at least tells me the problem is either my graphics card (does it not like accumulation buffers?) or those lines of code.
I don't know if it matters, but I am running the code through NetBeans 7.2 (C++) on a MacBook Pro from 2011. Also, I did request an accumulation buffer in the following line:
glutInitDisplayMode(GLUT_DEPTH | GLUT_ACCUM | GLUT_DOUBLE | GLUT_RGBA);
Here is a sample piece of code I just threw together. I'm not sure if something is wrong in the code, and I know I probably didn't use best practices either, but it gets the point across. I still experienced the error with this code:
#include <iostream>
#include <GLUT/GLUT.h>

using namespace std;

float Rotation = 0.0f;

void Reshape(int width, int height)
{
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glOrtho(-1, 1, -1.0f * ((float)height / (float)width), 1.0f * ((float)height / (float)width), 0.1f, 200.0f);
    glMatrixMode(GL_MODELVIEW);
}

void Update(int value)
{
    Rotation++;
    glutPostRedisplay();
    glutTimerFunc(17, Update, 0);
}

void InitGL()
{
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);
    glEnable(GL_COLOR_MATERIAL);
    glClearDepth(100.0f);
}

void Render(void)
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    gluLookAt(0, 0, 5.0f, 0, 0, 0, 0, 1, 0);

    glPushMatrix();
    {
        glRotatef(Rotation, 0.0, 1.0, 0.0);

        /* Render icosahedron */
        glColor3f(0.5f, 0.5f, 0.5f);
        glutSolidIcosahedron();

        /* Render wireframe */
        glColor4f(1.0, 1.0, 1.0, 1.0);
        glLineWidth(2.0);
        glutWireIcosahedron();
    }
    glPopMatrix();

    /* Blur */
    glAccum(GL_MULT, 0.99);
    glAccum(GL_ACCUM, 0.01);
    glAccum(GL_RETURN, 1.0);

    glFlush();
    glutSwapBuffers();
}

int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA | GLUT_ACCUM);
    glutInitWindowSize(400, 400);
    glutCreateWindow("Test");
    glutDisplayFunc(Render);
    glutReshapeFunc(Reshape);
    InitGL();
    Reshape(400, 400);
    glutTimerFunc(17, Update, 0);
    glutMainLoop();
    return 0;
}
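One thing worth checking before blaming the glAccum() lines themselves: ask the context how many accumulation bitplanes it actually received. If these queries return zeros, the GLUT_ACCUM request was silently ignored and glAccum() has nothing to operate on. A diagnostic sketch (place it anywhere after glutCreateWindow()):

GLint accumRed = 0, accumGreen = 0, accumBlue = 0;
glGetIntegerv(GL_ACCUM_RED_BITS,   &accumRed);
glGetIntegerv(GL_ACCUM_GREEN_BITS, &accumGreen);
glGetIntegerv(GL_ACCUM_BLUE_BITS,  &accumBlue);
cout << "accum bits: R" << accumRed << " G" << accumGreen << " B" << accumBlue << endl;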

gluLookAt() makes blank screen

I don't have much OpenGL experience. I am trying to draw a teapot and move a camera around the teapot. To this end I am using the gluLookAt function. The problem is that when I call gluLookAt the screen is blank and I can't see my teapot.
#include "openGLer.h"
void openGLer::simulate(grid* toSim, int* argc, char** argv)
{
myGrid = toSim;
glutInit(argc, argv);
glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
glutInitWindowSize(400, 400); //Set the window size
glutCreateWindow("");
glutDisplayFunc(display);
glutIdleFunc(display);
glutKeyboardFunc(handleKeypress);
glEnable(GL_DEPTH_TEST);
glutMainLoop();
}
void openGLer::handleKeypress(unsigned char key, //The key that was pressed
int x, int y)
{
switch (key)
{
case 27: exit(0);
}
}
void openGLer::camera()
{
gluLookAt(3, 3, 0,
0, 0, 0,
0, 1, 0
);
}
void openGLer::draw()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();
camera();
glutWireTeapot(0.5);
glutSwapBuffers();
}
void openGLer::display()
{
draw();
}
Why does gluLookAt() make the screen blank, and how do I fix it? When camera() is not called, the code performs as expected, with a teapot being displayed.
Have you set up your projection matrix correctly? Otherwise, your call to gluLookAt will cause the teapot to be too far away and therefore be clipped by the far plane.
Try adding this to your initialization code (and also your resize handler to fix the aspect ratio when the window is resized). I've set the far plane at 100, which should be plenty for your teapot.
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective (60.0, width/(float)height, 0.1, 100.0);
glMatrixMode(GL_MODELVIEW);
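For the resize handler, a minimal sketch (reshape is a hypothetical callback mirroring the question's display/handleKeypress members; register it next to the other callbacks in simulate()):

void openGLer::reshape(int width, int height)
{
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluPerspective(60.0, width / (float)height, 0.1, 100.0);
    glMatrixMode(GL_MODELVIEW);
}

// in simulate(), alongside glutDisplayFunc(display):
glutReshapeFunc(reshape);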