OpenGL images not displaying properly with some window sizes - C++

I'm new to OpenGL and am trying to display images. I've got the "Load image from file" part done and am now trying to bind the image to my shape (GL_TRIANGLES). Sometimes the image gets displayed correctly, but sometimes it does not. I've realized that it depends on the window size: if I resize the window I can get a clear image. Here are some screenshots of how the screen looks:
Working:
Broken:
Here's my draw loop:
void display() {
    // tick the frame counter
    frame++;
    start_time = clock();

    // update screen
    int width = glutGet(GLUT_WINDOW_WIDTH);
    int height = glutGet(GLUT_WINDOW_HEIGHT);
    update_screen(width, height);

    // clear screen
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    glPushMatrix();
    glClear(GL_COLOR_BUFFER_BIT);
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, png::get_textures()[1]);
    glEnable(GL_TEXTURE_GEN_S);
    glEnable(GL_TEXTURE_GEN_T);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
    glTexGeni(GL_S, GL_TEXTURE_GEN_MODE, GL_OBJECT_LINEAR);
    glTexGeni(GL_T, GL_TEXTURE_GEN_MODE, GL_OBJECT_LINEAR);

    glBegin(GL_TRIANGLES);
    glVertex2f(1000, 1000);
    glVertex2f(0, 1000);
    glVertex2f(0, 0);
    glVertex2f(1000, 1000);
    glVertex2f(1000, 0);
    glVertex2f(0, 0);
    glEnd();

    glDisable(GL_TEXTURE_2D);
    glPopMatrix();

    glFlush();
    glutSwapBuffers();

    elapsed_time = ((clock() - start_time) / (double(CLOCKS_PER_SEC) / 10000));

    // tick all entities
    player.tick(elapsed_time);

    glutPostRedisplay();
}
Also, if I remove glEnable(GL_TEXTURE_GEN_S), glEnable(GL_TEXTURE_GEN_T), or both, the image becomes completely blue or gray.
Edit: Oh, and I'm using PNG files, in case that matters.

Well, I feel sheepish!
I forgot to add glTexCoord2f...
here's my new file just in case someone else forgot their brain somewhere. :)
void display() {
    // tick the frame counter
    frame++;
    start_time = clock();

    // update screen
    int width = glutGet(GLUT_WINDOW_WIDTH);
    int height = glutGet(GLUT_WINDOW_HEIGHT);
    update_screen(width, height);
    //glMatrixMode(GL_MODELVIEW);
    //glLoadIdentity();

    // clear screen
    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    glPushMatrix();
    glClear(GL_COLOR_BUFFER_BIT);
    glEnable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, png::get_textures()[3]);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);

    glBegin(GL_QUADS);
    glTexCoord2f(0, 0); glVertex2f(0, 0);
    glTexCoord2f(1, 0); glVertex2f(1000, 0);
    glTexCoord2f(1, 1); glVertex2f(1000, 1000);
    glTexCoord2f(0, 1); glVertex2f(0, 1000);
    glEnd();

    glDisable(GL_TEXTURE_2D);
    glPopMatrix();

    glFlush();
    glutSwapBuffers();

    elapsed_time = ((clock() - start_time) / (double(CLOCKS_PER_SEC) / 10000));

    // tick all entities
    player.tick(elapsed_time);

    glutPostRedisplay();
}
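For the record, the window-size dependence of the original version isn't mysterious: with GL_OBJECT_LINEAR texgen and the default object planes, s and t are simply copies of the object-space x and y, so the 1000x1000 quad samples texture coordinates from 0 to 1000 and the visible pattern comes down to wrapping and filtering, which changes as the window is resized. If someone did want to keep texgen instead of per-vertex glTexCoord2f, scaling the planes is the rough idea; a minimal sketch, assuming the quad spans 0..1000 in object coordinates as above:

// Sketch only: keep GL_OBJECT_LINEAR texgen but scale the planes so the
// 0..1000 quad maps onto the 0..1 texture range (default planes give s = x, t = y).
GLfloat sPlane[] = { 1.0f / 1000.0f, 0.0f, 0.0f, 0.0f };
GLfloat tPlane[] = { 0.0f, 1.0f / 1000.0f, 0.0f, 0.0f };
glTexGeni(GL_S, GL_TEXTURE_GEN_MODE, GL_OBJECT_LINEAR);
glTexGeni(GL_T, GL_TEXTURE_GEN_MODE, GL_OBJECT_LINEAR);
glTexGenfv(GL_S, GL_OBJECT_PLANE, sPlane);
glTexGenfv(GL_T, GL_OBJECT_PLANE, tPlane);
glEnable(GL_TEXTURE_GEN_S);
glEnable(GL_TEXTURE_GEN_T);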

Related

Render FFmpeg AVFrame as OpenGL texture?

I'm attempting to render a JPEG image (1024x1024 pixels) in the form of an FFmpeg AVFrame as a texture in OpenGL. What I get instead is something that appears as a 1024x1024 dark green quad:
The code to render the AVFrame data in OpenGL is shown below. I have convinced myself that the raw RGB data held within the FFmpeg AVFrame data is not solely dark green.
GLuint g_texture = {};
//////////////////////////////////////////////////////////////////////////
void display()
{
    // Clear color and depth buffers
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glMatrixMode(GL_MODELVIEW);    // Operate on model-view matrix
    glEnable(GL_TEXTURE_2D);
    GLuint texture = g_texture;
    glBindTexture(GL_TEXTURE_2D, texture);

    // Draw a quad
    glBegin(GL_QUADS);
    glVertex2i(0, 0);        // top left
    glVertex2i(1024, 0);     // top right
    glVertex2i(1024, 1024);  // bottom right
    glVertex2i(0, 1024);     // bottom left
    glEnd();

    glDisable(GL_TEXTURE_2D);
    glBindTexture(GL_TEXTURE_2D, 0);
    glFlush();
}
/* Initialize OpenGL Graphics */
void initGL(int w, int h)
{
    glViewport(0, 0, w, h);         // use a screen size of WIDTH x HEIGHT
    glEnable(GL_TEXTURE_2D);        // Enable 2D texturing
    glMatrixMode(GL_PROJECTION);    // Make a simple 2D projection on the entire window
    glOrtho(0.0, w, h, 0.0, 0.0, 100.0);
    glMatrixMode(GL_MODELVIEW);     // Set the matrix mode to object modeling
    //glTranslatef( 0, 0, -15 );
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glClearDepth(0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);  // Clear the window
}
//////////////////////////////////////////////////////////////////////////
int main(int argc, char *argv[])
{
    std::shared_ptr<AVFrame> apAVFrame;
    if (!load_image_to_AVFrame(apAVFrame, "marble.jpg"))
    {
        assert(false);
        return 1;
    }
    // From here on out, the AVFrame is RGB interleaved
    // and is sized to 1,024 x 1,024 (power of 2).

    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE);
    glutInitWindowSize(1060, 1060);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("OpenGL - Creating a texture");

    glGenTextures(1, &g_texture);
    //glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glBindTexture(GL_TEXTURE_2D, g_texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, apAVFrame->width,
                 apAVFrame->height, 0, GL_RGB, GL_UNSIGNED_BYTE,
                 apAVFrame->data[0]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);  /* We will use linear interpolation for magnification filter */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);  /* We will use linear interpolation for minifying filter */

    initGL(1060, 1060);
    glutDisplayFunc(display);
    glutMainLoop();
    return 0;
}
Environment:
Ubuntu 18.04
GCC v8.2
EDIT: As per @immibis' suggestion below, it all works when I change the rendering of the quad to:
// Draw a quad
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex2i(0, 0); // top left
glTexCoord2f(1, 0);
glVertex2i(1024, 0); // top right
glTexCoord2f(1, 1);
glVertex2i(1024, 1024); // bottom right
glTexCoord2f(0, 1);
glVertex2i(0, 1024); // bottom left
glEnd();
You forgot to give your vertices texture coordinates, so all the pixels on your screen are reading the same pixel from the texture. (The top-left, or wherever the default texture coordinates are)
Use glTexCoord2f before glVertex2i to set the texture coordinates for the vertex. They go from 0 on the top/left of the texture to 1 on the bottom/right, so the corners of the texture are (0,0), (1,0), (1,1) and (0,1).
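One more thing worth checking when uploading AVFrame data this way (it wasn't the cause of the dark green quad, but it bites often): ffmpeg frequently pads each row, so linesize[0] can be larger than width * 3. If it is, the unpack state has to describe the real row length or the uploaded rows come out skewed. A minimal sketch, assuming an RGB24 frame:

// Sketch: account for ffmpeg row padding before uploading (assumes RGB24 data).
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);                            // rows need not be 4-byte aligned
glPixelStorei(GL_UNPACK_ROW_LENGTH, apAVFrame->linesize[0] / 3);  // linesize is in bytes, 3 bytes per pixel
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, apAVFrame->width, apAVFrame->height,
             0, GL_RGB, GL_UNSIGNED_BYTE, apAVFrame->data[0]);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);                           // restore the default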

OpenGL, SOIL, Textures and Transparent backgrounds

I can't seem to get the transparent background on my image to load as transparent. It's always white instead. I'm still fairly new to OpenGL/SOIL so if I did something stupid don't be mad :/ Does it have to do with my flag on the load being RGB instead of RGBA?
void Scene::loadTexShips()
{
    texShips[0] = SOIL_load_OGL_texture(
        "Textures/Carrier.png",
        SOIL_LOAD_LA,
        SOIL_CREATE_NEW_ID,
        SOIL_FLAG_MIPMAPS | SOIL_FLAG_NTSC_SAFE_RGB
    );
    if (texShips[0] == 0)
    {
        printf("SOIL loading error: '%s'\n", SOIL_last_result());
    }
}

void Scene::drawShips()
{
    glColor4ub(255, 255, 255, 255);
    glEnable(GL_TEXTURE_2D);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glLoadIdentity();
    glTranslatef(6.0f, 2.0f, -12.0f);
    glBindTexture(GL_TEXTURE_2D, texShips[0]);

    glBegin(GL_QUADS);
    glTexCoord2f(0, 0);
    glVertex3f(-4.0f, -3.0f, 2.0);
    glTexCoord2f(0, 1);
    glVertex3f(-4.0f, -2.3, 2.0);
    glTexCoord2f(1, 1);
    glVertex3f(-1.0, -2.3, 2.0);
    glTexCoord2f(1, 0);
    glVertex3f(-1.0, -3.0, 2.0);
    glEnd();

    glDisable(GL_TEXTURE_2D);
}
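One note on the load call above: SOIL_LOAD_LA asks SOIL for a two-channel luminance + alpha image, so the colour information is collapsed to grey before alpha even comes into play. If the intent is to keep the PNG's colour and its transparency, forcing four channels is the usual call; a sketch, not a confirmed fix for this particular file:

// Sketch: load the PNG with all four channels so the alpha channel survives.
texShips[0] = SOIL_load_OGL_texture(
    "Textures/Carrier.png",
    SOIL_LOAD_RGBA,            // force RGBA regardless of the file's channel count
    SOIL_CREATE_NEW_ID,
    SOIL_FLAG_MIPMAPS | SOIL_FLAG_NTSC_SAFE_RGB
);
if (texShips[0] == 0)
    printf("SOIL loading error: '%s'\n", SOIL_last_result());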

OpenGL - texture mapping cube fails

I am trying something that should normally be quite easy: applying a texture to the different faces of a cube.
I am able to apply it, but it looks as if it just takes an average of the colors of my image.
Why does that happen?
My code:
void MyGLWidget::drawCube()
{
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    glLoadIdentity();
    // glPushMatrix();
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glTranslatef(0.5, 0, 0.0);
    glRotatef(getCubeAngle(), 0.0f, 1.0f, 0.0f);
    glTranslatef(0, 0, 0);
    glDisable(GL_TEXTURE_GEN_S);
    glDisable(GL_TEXTURE_GEN_T);
    glBindTexture(GL_TEXTURE_2D, texture[0]);

    glBegin(GL_QUADS);
    // back
    glVertex3f(-0.1, 0.1, -0.1);   // upper left corner
    glVertex3f(0.1, 0.1, -0.1);    // upper right
    glVertex3f(0.1, -0.1, -0.1);   // lower right
    glVertex3f(-0.1, -0.1, -0.1);  // lower left
    /* other code to create rest of the cube */
    glEnd();

    glFlush();
    // glPopMatrix();
}
void MyGLWidget::resizeGL(int width, int height)
{
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glScalef(height * 1. / width, 1.0, 1.0);
    glMatrixMode(GL_MODELVIEW);
}
void MyGLWidget::myTextureMapping()
{
    QImage t;
    QImage b;
    if (!b.load("..../myImage.bmp"))
    {
        qDebug("error with image\n");
    }
    t = QGLWidget::convertToGLFormat(b);

    glGenTextures(1, &texture[0]);
    glBindTexture(GL_TEXTURE_2D, texture[0]);
    glTexImage2D(GL_TEXTURE_2D, 0, 3, t.width(), t.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, t.bits());
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
void MyGLWidget::initializeGL()
{
    myTextureMapping();
    glEnable(GL_TEXTURE_2D);
    glShadeModel(GL_SMOOTH);
    glClearColor(0.0f, 0.0f, 0.0f, 0.5f);
    glClearDepth(1.0f);
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
}
EDIT:
I added these texture coordinates:
glTexCoord2f(-0.1, 0.1);
glVertex3f(-0.1, 0.1, 0);   // upper left corner
glTexCoord2f(0.1, 0.1);
glVertex3f(0.1, 0.1, 0);    // upper right
glTexCoord2f(0.1, -0.1);
glVertex3f(0.1, -0.1, 0);   // lower right
glTexCoord2f(-0.1, -0.1);
glVertex3f(-0.1, -0.1, 0);  // lower left
But my image is bigger than the face of my cube:
source image : http://imgur.com/h48QARM
result in software: http://imgur.com/rxvK0Ot
You should be providing the texture co-ordinates for each vertex. What you have right now is just position data for the quad; the texture co-ordinates are missing.
Have a look at this:
OpenGL Textured Cube Not Being Mapped Correctly
Try this:
glTexCoord2f(0, 0);
glVertex3f(-0.1, 0.1, 0);   // upper left corner
glTexCoord2f(1, 0);
glVertex3f(0.1, 0.1, 0);    // upper right
glTexCoord2f(1, 1);
glVertex3f(0.1, -0.1, 0);   // lower right
glTexCoord2f(0, 1);
glVertex3f(-0.1, -0.1, 0);  // lower left
Aren't the texture coordinates wrong? To me it seems like you're going from -0.1 to 0.1, while texture coordinates are normally defined in the interval [0, 1].

Low OpenGL alpha values cropping

I've injected a DLL into a game process to make an overlay interface, but the problem is that alpha values are being "cropped" (not rendering at all).
I've tested several alpha values and it seems to fail if alpha is below 0.3.
To illustrate what happens, the image that I'm trying to render is:
and the game rendering the image is:
What exactly is happening here? Is it the current OpenGL state? I'm new to the API and have no idea why this happens.
More information:
The texture is being created from a buffer with:
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, this->width, this->height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, this->buffer);
I receive this buffer from Awesomium, and the values are right... I've checked the alpha values.
The rendering is done using this function (I've tried calling the game's texture rendering function too, but the same problem happens):
void DrawTextureExt(int texture, float x, float y, float width, float height)
{
    glPushAttrib(GL_ALL_ATTRIB_BITS);
    {
        glPushMatrix();
        {
            glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
            glEnable(GL_BLEND);
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            // glBlendFunc(GL_ONE, GL_ONE); << tried this too.. ugly results
            glEnable(GL_TEXTURE_2D);
            glBindTexture(GL_TEXTURE_2D, texture);
            glTranslatef(x, y, 0.0);
            glRotatef(0, 0.0, 0.0, 1.0);
            glTranslatef(-x, -y, 0.0);
            glBegin(GL_QUADS);
            {
                glTexCoord2f(0, 0); glVertex2f(x, y);
                glTexCoord2f(0, 1); glVertex2f(x, y + height);
                glTexCoord2f(1, 1); glVertex2f(x + width, y + height);
                glTexCoord2f(1, 0); glVertex2f(x + width, y);
            }
            glEnd();
            glBindTexture(GL_TEXTURE_2D, 0);
        }
        glPopMatrix();
    }
    glPopAttrib();
}
Sounds to me like the program you're hooking into uses alpha testing in its rendering function (which would make sense) and leaves it enabled when it finishes, which you are then running into. Try what happens if you disable the alpha test first thing in your hook (glDisable(GL_ALPHA_TEST)).
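In code, that suggestion might look like the following at the top of DrawTextureExt, right after the glPushAttrib (which already saves the enable bits, so glPopAttrib restores whatever the game had set); a sketch of the idea rather than a verified fix:

// Sketch: neutralise any alpha-test state left enabled by the game.
glDisable(GL_ALPHA_TEST);
// Or keep the test enabled but let every fragment through:
// glAlphaFunc(GL_ALWAYS, 0.0f);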

Black screen in LWJGL

I'm currently porting a game whose code is heavily obfuscated as a result of the port from C to Java.
My problem is that some users report a black screen and nothing else wrong (sound works fine, for example), with no errors pointing to the problem. On my PC it runs fine, which makes debugging hell.
I was wondering if anyone could post a (list of) reason(s) this might be occurring. I've read somewhere that one of the issues could be using a 32-bit Java on a 64-bit system.
My code is below, also open-sourced at: https://code.google.com/p/jake2t/
private void renderSideBySide() {
    glBindTexture(GL_TEXTURE_2D, 0);
    glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, sbsFboId);

    // Render side by side
    glPushAttrib(GL_ALL_ATTRIB_BITS);
    glPushMatrix();
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, width, 0, height);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glViewport(0, 0, width, height);
    glClearColor(0.0f, 0.0f, 1.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);
    glDisable(GL_BLEND);
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_CULL_FACE);

    int shaderId = sbsShader.getId();
    glDisable(GL_TEXTURE_2D);
    ARBShaderObjects.glUseProgramObjectARB(shaderId);
    if (postFboTextureLocation[0] < 0) {
        postFboTextureLocation[0] = ARBShaderObjects.glGetUniformLocationARB(shaderId, "leftTexture");
    }
    if (postFboTextureLocation[1] < 0) {
        postFboTextureLocation[1] = ARBShaderObjects.glGetUniformLocationARB(shaderId, "rightTexture");
    }
    if (postFboDepthTextureLocation[0] < 0) {
        postFboDepthTextureLocation[0] = ARBShaderObjects.glGetUniformLocationARB(shaderId, "leftDepthTexture");
    }
    if (postFboDepthTextureLocation[1] < 0) {
        postFboDepthTextureLocation[1] = ARBShaderObjects.glGetUniformLocationARB(shaderId, "rightDepthTexture");
    }

    // Load the images with the colors and the depth values
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, postFboTextureId[0]);
    ARBShaderObjects.glUniform1iARB(postFboTextureLocation[0], 0);
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, postFboTextureId[1]);
    ARBShaderObjects.glUniform1iARB(postFboTextureLocation[1], 1);
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, postFboDepthTexture[0]);
    ARBShaderObjects.glUniform1iARB(postFboDepthTextureLocation[0], 2);
    glActiveTexture(GL_TEXTURE3);
    glBindTexture(GL_TEXTURE_2D, postFboDepthTexture[1]);
    ARBShaderObjects.glUniform1iARB(postFboDepthTextureLocation[1], 3);

    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2i(0, 0);
    glTexCoord2f(1.0f, 0.0f);
    glVertex2i(width, 0);
    glTexCoord2f(1.0f, 1.0f);
    glVertex2i(width, height);
    glTexCoord2f(0.0f, 1.0f);
    glVertex2i(0, height);
    glEnd();

    ARBShaderObjects.glUseProgramObjectARB(0);
    // Rendering with warping
    glPopMatrix();
    glPopAttrib();
    unbindFBO();
}
public void drawPostFBOs() {
    renderSideBySide();

    glPushAttrib(GL_ALL_ATTRIB_BITS);
    glPushMatrix();
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, width, 0, height);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
    glViewport(0, 0, width, height);
    glClearColor(0.0f, 0.0f, 1.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glDisable(GL_DEPTH_TEST);
    glDisable(GL_LIGHTING);
    glDisable(GL_BLEND);
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_CULL_FACE);
    glDisable(GL_TEXTURE_2D);

    int shaderId = riftShader.getId();
    ARBShaderObjects.glUseProgramObjectARB(shaderId);
    if (sbsFboTextureLocation < 0) {
        sbsFboTextureLocation = ARBShaderObjects.glGetUniformLocationARB(shaderId, "tex");
    }
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, sbsFboTextureId);
    ARBShaderObjects.glUniform1iARB(sbsFboTextureLocation, 0);

    glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2i(0, 0);
    glTexCoord2f(1.0f, 0.0f);
    glVertex2i(width, 0);
    glTexCoord2f(1.0f, 1.0f);
    glVertex2i(width, height);
    glTexCoord2f(0.0f, 1.0f);
    glVertex2i(0, height);
    glEnd();

    ARBShaderObjects.glUseProgramObjectARB(0);
    glPopMatrix();
    glPopAttrib();
}
The common problems that an OpenGL 3+ programmer doesn't consider when coding are:
OpenGL Extensions
use of ARB and EXT extensions.
use of the correct OGL extension.
use of multiple implementations of different OGL extensions. Here is an example using C++:
#ifndef _MESA_INVERT_
/*** ... some code here ... ***/
#else
/*** ... alternative code here ... ***/
#endif
Check whether the video card supports the required extensions using:
glxinfo - Linux and Mac OS X based systems.
glview - Windows based systems.
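A quick way to make the same check from inside the program on a legacy/compatibility context is shown below (a sketch; on a core 3.0+ context you would enumerate glGetStringi(GL_EXTENSIONS, i) instead):

#include <GL/gl.h>
#include <cstring>

// Sketch: runtime extension check on a legacy (compatibility) OpenGL context.
bool hasExtension(const char* name)
{
    const char* all = reinterpret_cast<const char*>(glGetString(GL_EXTENSIONS));
    return all != nullptr && std::strstr(all, name) != nullptr;
}

// Example: fall back or report an error if EXT framebuffer objects are missing.
// if (!hasExtension("GL_EXT_framebuffer_object")) { ... }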