Set an image (JPEG/PNG) background using GLX (OpenGL and Xlib) - C++

I created an X11 window with OpenGL functionality, and I need to load an image (JPEG or PNG) as its background; the window may be larger than the image, that doesn't matter. Searching turned up suggestions such as DevIL or FreeImage, but I don't know which one to use. I set up the OpenGL window from a linked sample, and I want to write code in void renderGL() so that the image becomes the background. Can you tell me which image library to use, and provide the code if possible?

Also, what has to be done to plot colored pixels in OpenGL? I need a function that draws a pixel in a window, given only the x, y pixel position and an RGB color (unsigned int).
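For the pixel-plotting part, a minimal legacy-OpenGL sketch, assuming an orthographic projection where one unit maps to one pixel and a packed 0xRRGGBB colour (these assumptions are not in the original post), could look like this:

// Hypothetical helper: plot one pixel at (x, y) with a packed 0xRRGGBB colour.
// Assumes glOrtho(0, width, height, 0, ...) is already in effect.
void drawPixel(int x, int y, unsigned int rgb)
{
    glDisable(GL_TEXTURE_2D);          // colour only, no texture
    glBegin(GL_POINTS);
    glColor3ub((rgb >> 16) & 0xFF,     // red
               (rgb >> 8)  & 0xFF,     // green
               rgb         & 0xFF);    // blue
    glVertex2i(x, y);
    glEnd();
}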

I'm not much of an OpenGL programmer, but somehow I did this and it's working:
ilInit();                               /* Initialize DevIL */
ilGenImages(1, &texid);                 /* Generate one image name */
ilBindImage(texid);                     /* Bind the image name */
success = ilLoadImage(backgroundimage); /* Load the image file */
iWidth  = ilGetInteger(IL_IMAGE_WIDTH);
iHeight = ilGetInteger(IL_IMAGE_HEIGHT);
if (success){
    /* Convert every colour component into unsigned byte;
       replace IL_RGBA with IL_RGB if no alpha channel is needed */
    success = ilConvertImage(IL_RGBA, IL_UNSIGNED_BYTE);
}
glGenTextures(1, &image);               /* Generate a texture name */
glBindTexture(GL_TEXTURE_2D, image);    /* Bind the texture name */
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, ilGetInteger(IL_IMAGE_BPP),
             ilGetInteger(IL_IMAGE_WIDTH), ilGetInteger(IL_IMAGE_HEIGHT),
             0, ilGetInteger(IL_IMAGE_FORMAT), GL_UNSIGNED_BYTE,
             ilGetData());              /* Upload the pixels as the texture */

/* Per-frame drawing */
glRotatef(roll, 0.0f, 0.0f, 10.0f);
glOrtho(0.0, float(width), float(height), 0.0, 0.0, 100.0);
glMatrixMode(GL_MODELVIEW);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClearDepth(0.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glLoadIdentity();                       // Reset the modelview matrix
glBindTexture(GL_TEXTURE_2D, image);    // Select the image texture
glBegin(GL_QUADS);                      // Draw a textured quad centred in the window
glTexCoord2i(0, 0); glVertex2f(width/2 - iWidth/2, height/2 - iHeight/2);
glTexCoord2i(0, 1); glVertex2f(width/2 - iWidth/2, height/2 + iHeight/2);
glTexCoord2i(1, 1); glVertex2f(width/2 + iWidth/2, height/2 + iHeight/2);
glTexCoord2i(1, 0); glVertex2f(width/2 + iWidth/2, height/2 - iHeight/2);
glEnd();
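One small follow-up worth noting (an addition, not in the original code): once glTexImage2D has copied the pixels into the GL texture, the DevIL image is no longer needed and can be released:

/* After glTexImage2D has uploaded the pixels, the DevIL image
   can be freed (texid is the image name generated above). */
ilDeleteImages(1, &texid);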

Related

Render FFmpeg AVFrame as OpenGL texture?

I'm attempting to render a JPEG image (1024x1024 pixels), held in an FFmpeg AVFrame, as a texture in OpenGL. What I get instead is something that appears as a 1024x1024 dark green quad:

The code that renders the AVFrame data in OpenGL is shown below. I have convinced myself that the raw RGB data held within the FFmpeg AVFrame is not solely dark green.
GLuint g_texture = {};

//////////////////////////////////////////////////////////////////////////
void display()
{
    // Clear color and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glMatrixMode(GL_MODELVIEW);   // Operate on the model-view matrix
    glEnable(GL_TEXTURE_2D);
    GLuint texture = g_texture;
    glBindTexture(GL_TEXTURE_2D, texture);

    // Draw a quad
    glBegin(GL_QUADS);
    glVertex2i(0, 0);        // top left
    glVertex2i(1024, 0);     // top right
    glVertex2i(1024, 1024);  // bottom right
    glVertex2i(0, 1024);     // bottom left
    glEnd();

    glDisable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, 0);
    glFlush();
}
/* Initialize OpenGL Graphics */
void initGL(int w, int h)
{
    glViewport(0, 0, w, h);      // use a screen size of WIDTH x HEIGHT
    glEnable(GL_TEXTURE_2D);     // Enable 2D texturing

    glMatrixMode(GL_PROJECTION); // Make a simple 2D projection over the entire window
    glOrtho(0.0, w, h, 0.0, 0.0, 100.0);
    glMatrixMode(GL_MODELVIEW);  // Set the matrix mode to object modeling
    //glTranslatef( 0, 0, -15 );

    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClearDepth(0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Clear the window
}
//////////////////////////////////////////////////////////////////////////
int main(int argc, char *argv[])
{
    std::shared_ptr<AVFrame> apAVFrame;
    if (!load_image_to_AVFrame(apAVFrame, "marble.jpg"))
    {
        assert(false);
        return 1;
    }
    // From here on out, the AVFrame is RGB interleaved
    // and is sized to 1,024 x 1,024 (a power of 2).

    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE);
    glutInitWindowSize(1060, 1060);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("OpenGL - Creating a texture");

    glGenTextures(1, &g_texture);
    //glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glBindTexture(GL_TEXTURE_2D, g_texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, apAVFrame->width,
                 apAVFrame->height, 0, GL_RGB, GL_UNSIGNED_BYTE,
                 apAVFrame->data[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); /* linear interpolation for the magnification filter */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); /* linear interpolation for the minification filter */

    initGL(1060, 1060);
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}
Environment:
Ubuntu 18.04
GCC v8.2
EDIT: As per @immibis' suggestion below, it all works when I change the rendering of the quad to:
// Draw a quad
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex2i(0, 0);        // top left
glTexCoord2f(1, 0);
glVertex2i(1024, 0);     // top right
glTexCoord2f(1, 1);
glVertex2i(1024, 1024);  // bottom right
glTexCoord2f(0, 1);
glVertex2i(0, 1024);     // bottom left
glEnd();
You forgot to give your vertices texture coordinates, so every pixel on your screen reads the same pixel from the texture (the top-left one, or wherever the default texture coordinates point).

Use glTexCoord2f before each glVertex2i to set that vertex's texture coordinates. They run from 0 at the top/left of the texture to 1 at the bottom/right, so the corners of the texture are (0,0), (1,0), (1,1) and (0,1).
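A small helper that keeps each texture coordinate next to its vertex makes this omission harder to repeat (a sketch; drawTexturedRect is a hypothetical name, not from the original post):

// Hypothetical helper: draw an axis-aligned textured rectangle, pairing
// each vertex with its texture coordinate so neither can be forgotten.
void drawTexturedRect(int x, int y, int w, int h)
{
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glVertex2i(x,     y);     // top left
    glTexCoord2f(1, 0); glVertex2i(x + w, y);     // top right
    glTexCoord2f(1, 1); glVertex2i(x + w, y + h); // bottom right
    glTexCoord2f(0, 1); glVertex2i(x,     y + h); // bottom left
    glEnd();
}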

OpenGL Immediate Mode texture draws as black box (in FLTK provided context)

I'm trying to display a 128x128 array of floats (0.0 -> 1.0) as a texture of green on black within a legacy FLTK application. The rest of the application uses immediate mode for all its drawing, so I don't want to change the way it works. From reading tutorials, I've come up with the following calls, which should be sufficient to load the texture into graphics memory and draw it to the current context (managed by FLTK). However, all I can get it to do is draw a black square where the texture should be, drawn over anything else there (GL_LINES).

Is there anything basic that I'm missing here? Some initialization step that I've neglected? Thanks in advance. Here is the code that I'm trying to get running:
// Part of class init
glGenTextures(1, &tex);

// As far as I can tell, this is optional with FLTK having already
// set these parameters. Omitting them does not change the output.
glPushMatrix();
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();

// This is the meat of the problem. Reports no GL errors.
glEnable(GL_TEXTURE_2D);
glShadeModel(GL_FLAT);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, 128, 128, 0, GL_GREEN, GL_FLOAT, &image);
glBegin(GL_QUADS);
glTexCoord2d(0,0); glVertex2f(-0.9,-0.9);
glTexCoord2d(0,1); glVertex2f(-0.9, 0.9);
glTexCoord2d(1,0); glVertex2f( 0.9,-0.9);
glTexCoord2d(1,1); glVertex2f( 0.9, 0.9);
glEnd();
glDisable(GL_TEXTURE_2D);

// Again, busywork.
glPopMatrix();

How to transform SDL TTF Surface into GL Texture with Transparent background

I would like to draw text over content drawn in GL. I want the symbols themselves to be opaque and the rest transparent, so that the content underneath remains visible. The following code yields the correct text, but on a perfectly white background; my drawn content is completely absent.

How am I to solve this? I am using SDL 2.0, VSC.
glPushMatrix();

/* Content drawn in GL */

GLuint TextureID = 0;
SDL_Color Color = {30, 30, 30, 0};
TTF_Font * Font = TTF_OpenFont("Times.ttf", 30);
SDL_Surface * Message = TTF_RenderText_Blended(Font, "ASDASDASD", Color);

glGenTextures(1, &TextureID);
glBindTexture(GL_TEXTURE_2D, TextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, Message->w, Message->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, Message->pixels);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, TextureID);

glBegin(GL_QUADS);
{
    glColor4f(.5, .5, .5, 1);
    glTexCoord2f(0,0); glVertex2f(MouseX/10, MouseY/10);
    glTexCoord2f(1,0); glVertex2f((MouseX/10) + (Message->w / 10), MouseY/10);
    glTexCoord2f(1,1); glVertex2f((MouseX/10) + (Message->w / 10), (MouseY/10) + (Message->h / 10));
    glTexCoord2f(0,1); glVertex2f(MouseX/10, (MouseY/10) + (Message->h / 10));
}
glEnd();

glPopMatrix();
SDL_FreeSurface(Message);
SDL_GL_SwapWindow(GameWindow);
Think of it this way: when you create the texture above, you're drawing dark grey, fully transparent (30,30,30,0) text against a fully transparent background (0,0,0,0, due to TTF_RenderText_Blended). You're then using immediate-mode color blending to raster a quad of the same size (hint: with no blend func set, and no mention of disabling the z-buffer!).
P.S. You're leaking like crazy in the example:

- glGenTextures every frame without a glDeleteTextures (you only need a new texture if the text changes)
- TTF_OpenFont every frame without a TTF_CloseFont (keep the font open only while you need to render with it)
- the TTF_RenderText_Blended surface isn't leaked, but you could free it right after glTexImage2D
I assume you mean to use the font as a mask to subtract from a solid color background? I generally create font textures as white (full alpha), then apply color or masking in the shader. To accomplish a similar effect in immediate mode you could:
// startup
glEnable(GL_BLEND);
glDisable(GL_DEPTH_TEST);

SDL_Color Color = {255, 255, 255, 255};
TTF_Font * Font = TTF_OpenFont("Times.ttf", 30);
SDL_Surface * Message = TTF_RenderText_Blended(Font, "ASDASDASD", Color);
TTF_CloseFont(Font);

glGenTextures(1, &TextureID);
glBindTexture(GL_TEXTURE_2D, TextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, Message->w, Message->h, 0, GL_RGBA, GL_UNSIGNED_BYTE, Message->pixels);
int TextW = Message->w, TextH = Message->h; // keep the size; the surface is freed next
SDL_FreeSurface(Message);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

// frame render
glClear(GL_COLOR_BUFFER_BIT);

// You'll see this blend func in many (immediate mode) 2D rendering examples.
// Without the blend equation below, this would render the white text on top
// of the quad's background color.
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Use the text as a mask to cut away from what is below.
glBlendEquation(GL_FUNC_REVERSE_SUBTRACT);

glBindTexture(GL_TEXTURE_2D, TextureID);
glBegin(GL_QUADS);
{
    glColor4f(.5, .5, .5, 1);
    glTexCoord2f(0,0); glVertex2f(MouseX/10, MouseY/10);
    glTexCoord2f(1,0); glVertex2f((MouseX/10) + (TextW / 10), MouseY/10);
    glTexCoord2f(1,1); glVertex2f((MouseX/10) + (TextW / 10), (MouseY/10) + (TextH / 10));
    glTexCoord2f(0,1); glVertex2f(MouseX/10, (MouseY/10) + (TextH / 10));
}
glEnd();

// If done every frame, make sure to: glDeleteTextures(1, &TextureID);
SDL_GL_SwapWindow(GameWindow);
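One caveat worth adding (an assumption about the rest of the frame, not stated in the original answer): GL_FUNC_REVERSE_SUBTRACT stays in effect until changed, so anything drawn after the text should restore the default blend equation first:

// Restore the default blend state after the text pass, so later draws
// blend normally again (GL_FUNC_ADD is the GL default).
glBlendEquation(GL_FUNC_ADD);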
Very handy site here: http://www.andersriggelsen.dk/glblendfunc.php

Draw polygons above background image

My question is about drawing a polygon or other OpenGL primitives on top of a background image, like painting on different layers, although in OpenGL I don't think there are layers.

Right now I'm running some tests, trying to draw a triangle and a line over the background image. To draw the background I use a quad of the OpenGL window's size and apply the PNG image to that quad as a texture. After that I try to paint the triangle and the line in different colors, but I don't see anything except the background image. I've played with the alpha channel, but I still don't see anything.
Code:
void init()
{
    // glClearColor(0.0, 0.0, 0.0, 0.0);
    glEnable(GL_DEPTH_TEST);

    // The following two lines enable semi-transparency
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    int width, height;
    bool hasAlpha;
    char filename[] = "prueba-luz.png";
    bool success = loadPngImage(filename, width, height, hasAlpha, &textureImage);
    if (!success)
    {
        cout << "Unable to load png file" << endl;
        return;
    }
    cout << "Image loaded " << width << " " << height << " alpha " << hasAlpha << endl;

    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexImage2D(GL_TEXTURE_2D, 0, hasAlpha ? 4 : 3, width,
                 height, 0, hasAlpha ? GL_RGBA : GL_RGB, GL_UNSIGNED_BYTE,
                 textureImage);
    // glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
    // glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
    // glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
void display(void)
{
    glEnable(GL_TEXTURE_2D);
    // Like GL_REPLACE, but transparency can be applied to the texture
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    glPushMatrix();
    glColor4f(1, 1, 1, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glBegin(GL_QUADS);
    glTexCoord2i(0,0); glVertex2i(10, -10);
    glTexCoord2i(1,0); glVertex2i(-10, -10);
    glTexCoord2i(1,1); glVertex2i(-10, 10);
    glTexCoord2i(0,1); glVertex2i(10, 10);
    glEnd();
    glPopMatrix();
    glDisable(GL_TEXTURE_2D);

    glBegin(GL_POLYGON);
    glColor4f(1.0, 1.0, 1.0, 0.8);
    glVertex3f(-1.0f, -3.0f, 0.0f); // A
    glVertex3f( 1.0f, -3.5f, 0.0f); // B
    glVertex3f( 0.0f,  0.5f, 0.0f); // C
    glEnd();

    glBegin(GL_LINES);
    glColor4f(1.0, 0.0, 0.0, 0.8);
    glVertex2f(-8.0, 6.0);
    glVertex2f( 5.0, -3.0);
    glEnd();

    glutSwapBuffers();
}
How can I draw anything over the background image?
Some things spotted:

Try disabling the depth test. You are drawing everything at Z=0, and your application is definitely writing to the depth buffer; I'm not sure why you enable depth testing here, since everything you draw is at Z=0. By default, when depth testing is enabled, any fragment written at the same or a greater Z value than the one already in the depth buffer is discarded. This can be changed with glDepthFunc (the default is GL_LESS). This is probably why your geometry is discarded in this example.

Drawing 2D geometry does not mean the depth buffer is unaffected: OpenGL draws this geometry at Z=0 and writes that to your depth buffer. Currently there is no reason for your application to use depth testing, unless you draw your geometry at different z positions. You can use depth testing in 2D drawing to make "layers" by assigning a different z value to each object.

A good practice for 2D drawing with z-culling is to draw the closest layer first, then the layers further back; that way you write fewer fragments. For example (a code sketch follows this list):
Z = 0 : Draw non-transparent overlays
Z = -1 : Draw stuff hidden behind the overlays
Z = -2 : Draw background
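A sketch of that ordering (the draw* helpers are hypothetical, and an orthographic projection spanning these z values is assumed):

// Front-to-back layered 2D drawing with z-culling.
// drawOverlay, drawScene and drawBackground are hypothetical helpers
// that emit their geometry at the given z value.
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);      // keep the default: nearer fragments win
drawOverlay(0.0f);         // closest layer first, fills the depth buffer
drawScene(-1.0f);          // fragments hidden behind the overlay are culled
drawBackground(-2.0f);     // drawn last, so it pays for the fewest fragments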
However, transparent objects need special rules.
You need to take control of your depth buffer. The options are:

glEnable/glDisable(GL_DEPTH_TEST): enable or disable z-culling
glDepthMask(GL_TRUE/GL_FALSE): enable or disable writing to the depth buffer
(glDepthFunc can be used to define how fragments are discarded)

Note that the first two functions give you more combinations than you might think:
Depth culling and depth write when drawing objects
Depth culling, but your object does not affect the depth buffer
No depth culling, but your object will write to the depth buffer
No depth culling and no depth write
If you want to draw all your transparent objects in a final pass, you can, for example, enable depth testing so the objects are not drawn on top of your overlays, but disable depth writes so that several overlapping objects can all be drawn in that layer, as sketched below.
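A sketch of that transparent last pass (drawTransparentObjects is a hypothetical helper):

// Test against the depth buffer so transparents stay behind the overlays,
// but do not write depth, so several overlapping transparent objects
// can all be drawn.
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_FALSE);     // read-only depth buffer for this pass
drawTransparentObjects();
glDepthMask(GL_TRUE);      // restore depth writes for the next frame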
Use your creativity and learn to love the depth buffer :D

Problems rendering to a texture using FBO in OS X 10.7 but not Linux

I have a test program that programmatically renders a texture and then renders a simple quad skinned with that texture. The program is extremely simple and everything is done in 2D. It works great under Linux, and everything looks like it should:

However, when I run the program on my Mac running 10.7, nothing shows up:

I'm at a complete loss as to why this program isn't working on my Mac.
Here are the relevant bits of the program.
Creating the texture:
void createTextureObject(){
    cout << "Creating a new texture of size:" << textureSize << endl;
    //glDeleteTextures(1, &textureId);
    glGenTextures(1, &textureId);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap generation, included since OpenGL 1.4
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, textureSize, textureSize, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
    isTextureValid = true;
    createFBObject();
}
Creating the Frame Buffer Object:
void createFBObject(){
    cout << "Creating a new frame buffer object" << endl;
    glDeleteFramebuffers(1, &fboId);
    glGenFramebuffersEXT(1, &fboId);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);

    glGenRenderbuffersEXT(1, &rboId);
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, rboId);
    glRenderbufferStorageEXT(GL_RENDERBUFFER_EXT, GL_DEPTH_COMPONENT, textureSize, textureSize);
    glBindRenderbufferEXT(GL_RENDERBUFFER_EXT, 0);

    glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT, GL_COLOR_ATTACHMENT0_EXT, GL_TEXTURE_2D, textureId, 0);
    glFramebufferRenderbufferEXT(GL_FRAMEBUFFER_EXT, GL_DEPTH_ATTACHMENT_EXT, GL_RENDERBUFFER_EXT, rboId);

    bool status = checkFramebufferStatus();
    if(!status){
        cout << "FrameBufferObject not created! Quitting!" << endl;
        fboUsed = false;
        exit(1);
    }
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
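(checkFramebufferStatus is called above but not shown in the post; a minimal sketch of what it might look like:)

// Minimal sketch of the status check used above (not part of the original post).
bool checkFramebufferStatus(){
    GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER_EXT);
    if (status != GL_FRAMEBUFFER_COMPLETE_EXT){
        cout << "Framebuffer incomplete, status: 0x" << hex << status << dec << endl;
        return false;
    }
    return true;
}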
Rendering to the texture:
void drawToTexture(){
    if (!isTextureValid)
        createTextureObject();
    glViewport(0, 0, textureSize, textureSize);
    glLoadIdentity();

    // set the rendering destination to the FBO
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboId);

    // clear the buffers
    glClearColor(0, 0, 0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // draw to the texture
    glColor3f(0.0, 1.0, 0.0);
    for (int i = 1; i <= 5; i++){
        glBegin(GL_LINE_LOOP);
        glVertex2f(-1 * i/5.0f, -1 * i/5.0f);
        glVertex2f( 1 * i/5.0f, -1 * i/5.0f);
        glVertex2f( 1 * i/5.0f,  1 * i/5.0f);
        glVertex2f(-1 * i/5.0f,  1 * i/5.0f);
        glEnd();
    }
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
}
Rendering the textured quad:
void drawTexturedQuad(){
    glViewport(50, 50, textureSize, textureSize);
    glLoadIdentity();
    glClearColor(0.2, 0.2, 0.2, 0.2);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glBindTexture(GL_TEXTURE_2D, textureId);
    int size = 1;
    int texS = 1;
    glBegin(GL_QUADS);
    glColor4f(1, 1, 1, 1);
    glTexCoord2f(texS, texS); glVertex3f(     size,      size, 0);
    glTexCoord2f(0,    texS); glVertex3f(-1 * size,      size, 0);
    glTexCoord2f(0,    0);    glVertex3f(-1 * size, -1 * size, 0);
    glTexCoord2f(texS, 0);    glVertex3f(     size, -1 * size, 0);
    glEnd();
    glBindTexture(GL_TEXTURE_2D, 0);
}
I'm using GLUT and my display call back is simply:
void displayCB(){
    drawToTexture();
    drawTexturedQuad();
    glutSwapBuffers();
}
Additionally, I have other methods that check whether frame buffer objects are supported by OpenGL; if they are not, the program quits. There is also logic that sets textureSize whenever the window is resized.
glTexParameteri(GL_TEXTURE_2D, GL_GENERATE_MIPMAP, GL_TRUE); // automatic mipmap generation included in OpenGL v1.4
This might be the problem. The GL specification doesn't say exactly at what point mipmaps should be generated, and of course this causes problems with render-to-texture. That is why GL_EXT_framebuffer_object (and the core versions too) introduced glGenerateMipmapEXT, which you call at exactly the point where you want the mipmaps generated (usually right after rendering to the texture).

So remove the GL_GENERATE_MIPMAP stuff and call glGenerateMipmap manually when needed. You can read more about this problem in the GL_EXT_framebuffer_object specification.
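In this program that would mean something like the following at the end of drawToTexture (a sketch adapted from the code above):

// At the end of drawToTexture(), after detaching the FBO:
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
glBindTexture(GL_TEXTURE_2D, textureId);
glGenerateMipmapEXT(GL_TEXTURE_2D);   // generate mipmaps exactly here
glBindTexture(GL_TEXTURE_2D, 0);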
Are you sure that your buffer id is 0 and not 1? Maybe another FBO was already created before.

Also check the texture dimensions; try making them a power of two.
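A quick way to verify the power-of-two condition (a small helper sketch, not from the original answer):

// Returns true when n is a positive power of two (128, 256, 1024, ...).
bool isPowerOfTwo(int n){
    return n > 0 && (n & (n - 1)) == 0;
}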