I use OpenGL and SDL2 to render Spine animations. At a specific z-order the animations are displayed as white blocks: every texture comes out white. I suspect the error is in my OpenGL draw code.
glPushMatrix();
float texw = 0, texh = 0;
if (texture) {
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    if (SDL_GL_BindTexture(texture, &texw, &texh) != 0)
        printf("WTF\n");
}
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(color[0], color[1], color[2], color[3]);
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, uvs);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// if (num_vertices > 0) {
glDrawElements(GL_TRIANGLES, num_indices, GL_UNSIGNED_SHORT, indices);
glDisableClientState(GL_VERTEX_ARRAY);
// glDisableClientState(GL_COLOR_ARRAY);
if (texture) {
    SDL_GL_UnbindTexture(texture);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}
glColor4f(1.0, 1.0, 1.0, 1.0);
glPopMatrix();
This is my code. Does anyone see anything wrong with it?
Why am I getting white textures?
Two-dimensional texturing has to be enabled by glEnable(GL_TEXTURE_2D) and can be disabled by glDisable(GL_TEXTURE_2D).
If texturing is enabled, then the texture which is currently bound when the geometry is drawn is wrapped onto that geometry.
If texturing is enabled, then by default the color of the texel is multiplied by the current color, because by default the texture environment mode (GL_TEXTURE_ENV_MODE) is GL_MODULATE. See glTexEnv.
This means the color of the texture's texels is "mixed" with the last color you set with glColor4f.
glEnable(GL_TEXTURE_2D);
glDrawElements(GL_TRIANGLES, num_indices, GL_UNSIGNED_SHORT, indices);
glDisable(GL_TEXTURE_2D);
Note that all of this only applies to the fixed-function pipeline, i.e. when you are not using a shader program.
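For context, here is a minimal sketch of the draw code from the question with that change applied; apart from the added glEnable(GL_TEXTURE_2D)/glDisable(GL_TEXTURE_2D) calls, all names are taken from the question:
glPushMatrix();
float texw = 0, texh = 0;
if (texture) {
    glEnable(GL_TEXTURE_2D);                        // enable 2D texturing before the textured draw
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    SDL_GL_BindTexture(texture, &texw, &texh);
}
glEnableClientState(GL_VERTEX_ARRAY);
glColor4f(color[0], color[1], color[2], color[3]);  // modulated with the texel colors (GL_MODULATE)
glVertexPointer(2, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, uvs);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glDrawElements(GL_TRIANGLES, num_indices, GL_UNSIGNED_SHORT, indices);
glDisableClientState(GL_VERTEX_ARRAY);
if (texture) {
    SDL_GL_UnbindTexture(texture);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
    glDisable(GL_TEXTURE_2D);                       // leave texturing disabled for untextured draws
}
glColor4f(1.0, 1.0, 1.0, 1.0);
glPopMatrix();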
This image is rendered using three passes.
In the first pass, I render the three axes.
In the second pass, a transparent cylinder is rendered (glEnable(GL_BLEND)) with alpha = 0.5f.
Finally, the golden and grey spheres are rendered in the third pass (glEnable(GL_BLEND)).
The alpha value of the golden spheres is 1.0f and of the grey sphere 0.2f.
The problem:
As you can see,
the cylinder overlaps the spheres even though blending is enabled;
the axes overlap the cylinder and the spheres!
Here is my code:
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClearDepthf(1.0f);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LEQUAL);
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
glEnable(GL_CULL_FACE);
glFrontFace(GL_CCW);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
Here the data is prepared and sent to the shaders (first pass):
glDrawElements(GL_POINTS, 256, GL_UNSIGNED_INT, reinterpret_cast<void*>(0));
PS: a geometry shader is used to render lines from the given points.
Then we prepare and pass the cylinder data
glEnable(GL_BLEND);
glCullFace(GL_FRONT);
glDrawElements(GL_POINTS, 256, GL_UNSIGNED_INT, reinterpret_cast<void*>(0));
glCullFace(GL_BACK);
glDrawElements(GL_POINTS, 256, GL_UNSIGNED_INT, reinterpret_cast<void*>(0));
glDisable( GL_BLEND);
PS: a geometry shader is also used to render the mesh of the cylinder from the given points.
Finally, I render the golden sphere and the grey sphere in one pass
glEnable(GL_BLEND);
glDrawElements(GL_LINE_STRIP, goldenSphereNumber, GL_UNSIGNED_INT, (void*)0);
glDrawElements(GL_LINE_STRIP, sphereIndexCount, GL_UNSIGNED_INT, (void*)0);
glDisable( GL_BLEND);
PS: here, too, a geometry shader is used to render the meshes of the spheres from the given lines.
Do you see anything wrong? Could you help, please?
Normally this code renders the image of the texture to the screen. But if I add the call glBindFramebuffer(GL_FRAMEBUFFER, 0) to the code, it does not render anything; the screen is just filled with the glClearColor color. I am working with Qt, so I am using QOpenGLWidget.
glViewport(0, 0, _width, _height);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClearColor(0.0,0.0,0.0,1.0);
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
_program.bind();
glBindVertexArray(_vao);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textureID);
glUniform1i(glGetUniformLocation(_program.programId(), "u_texture"), 1);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
What can possibly go wrong?
EDIT:
/* final.fsh */
#version 330 core
uniform sampler2D u_texture;
in vec4 qt_TexCoord0;
out vec4 fragColor;
void main(void)
{
    fragColor = texture(u_texture, qt_TexCoord0.xy);
}
/* final.vsh */
#version 330 core
layout (location = 0) in vec3 a_position;
out vec4 qt_TexCoord0;
void main(void)
{
    gl_Position = vec4(a_position, 1.0);
    const mat4 B = mat4(0.5, 0.0, 0.0, 0.0,
                        0.0, 0.5, 0.0, 0.0,
                        0.0, 0.0, 0.5, 0.0,
                        0.5, 0.5, 0.5, 1.0);
    qt_TexCoord0 = B * vec4(a_position, 1.0);
}
/* Loads VAO to GPU */
GLfloat max_ = 1.0;
GLfloat min_ = -1.0;
GLfloat vert[] = {
    max_, max_, 0,
    max_, min_, 0,
    min_, max_, 0,
    max_, min_, 0,
    min_, min_, 0,
    min_, max_, 0,
};
glGenVertexArrays(1, &_vao);
glBindVertexArray(_vao);
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vert), vert, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(GLfloat)*3, (GLvoid*)0);
glBindVertexArray(0);
As stated by G.M., and according to the docs, glBindFramebuffer(GL_FRAMEBUFFER, 0) "breaks the existing binding of a framebuffer object to the target". You need this call after having used your framebuffers for offscreen rendering. When you finally want to render to the screen, you have to render to the back buffer by calling glDrawBuffer(GL_BACK).
So your render loop might look like:
// Do some rendering stuff in n offscreen textures
glBindFramebuffer(GL_FRAMEBUFFER, the_buffer);
glDrawBuffers(n, buffers);
// Rendering calls
// Finally use the results for the final rendering
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glDrawBuffer(GL_BACK);
// Rendering calls
EDIT:
It appears you are using QOpenGLWidget as your OpenGL provider / windowing system. The Qt developers use framebuffers to render their UI, and the content of your QOpenGLWidget is itself drawn inside an FBO.
So what this means (at least in my experience; there might be a better way to tackle this problem) is that you cannot render into the back buffer anymore; you have to render into Qt's framebuffer.
One way to do this (again, it might not be the best way, but it has worked for me) is to save the currently bound framebuffer at the beginning of your rendering and do your final rendering into that buffer, which leads to:
// First of all, save the already bound framebuffer
GLint qt_buffer;
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &qt_buffer);
// Do some rendering stuff in n offscreen textures
glBindFramebuffer(GL_FRAMEBUFFER, your_buffer);
glDrawBuffers(n, buffers);
// Rendering calls
// Finally use the results for the final rendering
if (glIsFramebuffer(qt_buffer))
{
    glBindFramebuffer(GL_FRAMEBUFFER, qt_buffer);
    glDrawBuffers(1, buffers);
}
else
{
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glDrawBuffer(GL_BACK);
}
// Rendering calls
I had the same issue, and I finally found that the cause is glBindFramebuffer(GL_FRAMEBUFFER, 0);
QOpenGLWidget's default framebuffer may not be 0, so you need to get it by calling QOpenGLContext::defaultFramebufferObject().
So change it like this:
glViewport(0, 0, _width, _height);
glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebufferObject()); //<-- change here
glClearColor(0.0,0.0,0.0,1.0);
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
glDisable(GL_DEPTH_TEST);
_program.bind();
glBindVertexArray(_vao);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, textureID);
glUniform1i(glGetUniformLocation(_program.programId(), "u_texture"), 1);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
Solved by adding glDepthFunc(GL_ALWAYS);
I'm trying to blend two textures on a heightfield with glDrawElements. The normal-pointer and vertex-pointer data are the same, but the texture-coordinate pointers differ for the two textures. No matter which blend function I try, only one of the textures is ever visible, even though large parts of texture_two are transparent. Does glDrawElements work with GL_BLEND, or am I doing something wrong?
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glBlendEquation(GL_FUNC_ADD);
glDepthFunc(GL_ALWAYS);
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texture_ind_one);
glBindTexture(GL_TEXTURE_2D, texture_one);
glDrawElements(GL_TRIANGLES, indicies_num, GL_UNSIGNED_INT, indices);
glNormalPointer(GL_FLOAT, 0, normals);
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texture_ind_two);
glBindTexture(GL_TEXTURE_2D, texture_two);
glDrawElements(GL_TRIANGLES, indicies_num, GL_UNSIGNED_INT, indices);
Thank you very much!
You need to set an appropriate depth function via glDepthFunc using something that has "equals" in it (probably GL_LEQUAL), since you are drawing twice at the same depth.
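Applied to the code from the question, a minimal sketch of the change would be:
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
glBlendEquation(GL_FUNC_ADD);
glDepthFunc(GL_LEQUAL);   // lets the second pass draw at the same depth without disabling depth testing entirely
// ... set up pointers, bind texture_one, draw, then bind texture_two, draw, exactly as before ...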
You can also consider blending the textures yourself inside a fragment shader.
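If you go the shader route, a minimal fragment-shader sketch could look like the following; the sampler and varying names here are assumptions, not taken from the question:
#version 330 core
uniform sampler2D tex_one;   // assumed to be bound to texture_one
uniform sampler2D tex_two;   // assumed to be bound to texture_two
in vec2 uv_one;              // first set of texture coordinates
in vec2 uv_two;              // second set of texture coordinates
out vec4 fragColor;
void main()
{
    vec4 c1 = texture(tex_one, uv_one);
    vec4 c2 = texture(tex_two, uv_two);
    // composite texture_two over texture_one using texture_two's alpha
    fragColor = vec4(mix(c1.rgb, c2.rgb, c2.a), 1.0);
}
This way both textures are sampled in a single pass, so no blending or depth trickery is needed at all.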
With the same code as in my previous question, Rendering quad with tiling image?, I don't understand why the triangle is not being rendered on top of the textured quad.
Can someone point out what am I missing?
You have the depth test enabled, which defaults to GL_LESS (only fragments that are closer get drawn).
If you want a background, disable depth writing during the first pass:
void GLViewer::paintGL()
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glDepthMask(GL_FALSE);
    glDisable(GL_DEPTH_TEST);
    m_backgroundShader.bind();
    glBindVertexArray(m_backgroundVAO);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_textureID);
    glUniform1i(glGetUniformLocation(m_backgroundShader.programId(), "tex"), 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glDepthMask(GL_TRUE);
    glEnable(GL_DEPTH_TEST);
    m_triangleShader.bind();
    glBindVertexArray(m_VAO);
    glDrawArrays(GL_TRIANGLES, 0, 3);

    update();
}
I am working on an n-body code which models the dynamics of a stellar disk. In the rendering, there are two types of particles: "classic" particles (white in the image below) and "dark matter" particles (blue).
Here is the image at the start of the simulation:
Everything seems to be OK with the transparency, but if I zoom in during the run, I notice that some particles actually keep the same intermediate color, i.e. they stay purple.
Here is an example in this image (the stellar disk seen from the side):
My main problem is that I don't understand why the color doesn't change as a function of the other particles behind. For example, I would like a white/blue particle to be partially shaded by the other blue/white particles, in real time.
Here is my drawPoints() function where I use transparency:
void drawPoints()
{
    glEnable(GL_POINT_SPRITE);
    glTexEnvi(GL_POINT_SPRITE, GL_COORD_REPLACE, GL_TRUE);
    glEnable(GL_VERTEX_PROGRAM_POINT_SIZE_NV);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    //glEnable( GL_DEPTH_TEST );
    glUseProgram(m_program);
    glUniform1f(glGetUniformLocation(m_program, "pointRadius"), m_particleRadius);
    glUniform1f(glGetUniformLocation(m_program, "pointScale"), m_pointScale);

    GLuint vbo_disk;
    glBindBuffer(GL_ARRAY_BUFFER, vbo_disk);
    glVertexPointer(4, GL_DOUBLE, 4*sizeof(double), pos);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    glDrawArrays(GL_POINTS, 0, numBodies_disk);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDisableClientState(GL_VERTEX_ARRAY);

    GLuint vbo_halo;
    glBindBuffer(GL_ARRAY_BUFFER, vbo_halo);
    glVertexPointer(4, GL_DOUBLE, 4*sizeof(double), &pos[numBodies_disk]);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColor4f(0.0f, 0.0f, 1.0f, 0.5f);
    glDrawArrays(GL_POINTS, 0, numBodies_halo);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glDisableClientState(GL_VERTEX_ARRAY);

    glDisable(GL_BLEND);
    glDisable(GL_POINT_SPRITE);
}
I tried to use glEnable(GL_DEPTH_TEST), but then the point-sprite textures are drawn with black background squares.
Could you give me some clues on how to get this cumulative, partial transparency in real time?
Make sure you disable depth testing:
glDisable( GL_DEPTH_TEST );
Then, you may want to try different blending modes such as additive blending:
glBlendFunc( GL_SRC_ALPHA, GL_ONE );
While additive blending is really pretty, it may produce too much white, which could defeat the purpose of this visualization. You may try lowering the alpha values in glColor4f. Another solution would be to use blue and red particles to accentuate the difference.
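As a rough sketch, the relevant part of drawPoints() from the question with these suggestions applied might become the following (the lowered alpha values are illustrative assumptions):
glDisable(GL_DEPTH_TEST);              // don't let earlier points occlude later ones
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE);     // additive blending: contributions accumulate regardless of order
// ... bind the disk VBO and set the vertex pointer as before ...
glColor4f(1.0f, 1.0f, 1.0f, 0.4f);     // "classic" particles, lowered alpha
glDrawArrays(GL_POINTS, 0, numBodies_disk);
// ... bind the halo VBO and set the vertex pointer as before ...
glColor4f(0.0f, 0.0f, 1.0f, 0.3f);     // "dark matter" particles, lowered alpha
glDrawArrays(GL_POINTS, 0, numBodies_halo);
glDisable(GL_BLEND);
With additive blending the draw order no longer matters, so overlapping white and blue sprites accumulate into intermediate colors instead of one simply overwriting the other.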