GLSL sampler2DRect and single channel (GL_RED) data

I have pixel map data: 1 channel, 8 bits per pixel.
I have the pixel map's width and height.
I'm trying to submit the pixmap data to the fragment shader.
I'm using OpenGL 3 with a VAO and VBO.
My setup:
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_RECTANGLE, texture);
glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_RECTANGLE, 0, GL_RED, width, height, 0, GL_RED, GL_UNSIGNED_BYTE, data);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, ..., vertices, GL_STATIC_DRAW);
...create program...
glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "image"), 0);
glDrawArrays(GL_TRIANGLE_FAN, 0, ...);
And fragment shader:
uniform sampler2DRect image;
varying vec2 varying_texcoord;
void main() {
    vec4 sample = texture2DRect(image, varying_texcoord);
    gl_FragColor = vec4(1.0, 0.0, 0.0, sample.a);
}
gl_FragColor should paint pixels light or dark red depending on the sample.a value; however, it seems that sample.a is always 1.0 - I'm getting pure red #ff0000.
I think the problem is in glTexImage2D, isn't it?
Please assume program and data are valid.

I believe the answer you are looking for is in the documentation for glTexImage2D. It says:
"GL_RED:
Each element is a single red component. The GL converts it to floating point and assembles it into an RGBA element by attaching 0 for green and blue, and 1 for alpha. Each component is then multiplied by the signed scale factor GL_c_SCALE, added to the signed bias GL_c_BIAS, and clamped to the range [0,1]."
You're asking for the .a component, which is always 1. You need to use the component the texture actually contains data in - which is .r.
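Applied to the shader in the question, the fix is a one-character change (the asker's own fragment shader with .a swapped for .r):
uniform sampler2DRect image;
varying vec2 varying_texcoord;
void main() {
    vec4 sample = texture2DRect(image, varying_texcoord);
    gl_FragColor = vec4(1.0, 0.0, 0.0, sample.r);
}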
Hope this helps!

The question is very old, but the answer could help others.
You need to use true pixel coordinates if you use sampler2DRect.
I assume you copy the incoming texture coordinates to varying_texcoord in the vertex shader.
If you do so, you need to scale the coordinates:
vec2 coords = varying_texcoord * vec2(textureSize(image));
Then sample with the scaled coordinates:
vec4 sample = texture(image, coords);
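For completeness, a minimal sketch of the vertex shader this assumes, in the same legacy GLSL style as the question (the position and texcoord attribute names are hypothetical):
attribute vec4 position;
attribute vec2 texcoord;
varying vec2 varying_texcoord;
void main() {
    varying_texcoord = texcoord; // forwarded in [0,1]; scaled to pixels in the fragment shader
    gl_Position = position;      // assumes pre-transformed positions
}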

Related

OpenGL Depth Texture is the same as Color Texture

I'm trying to visualize the depth texture attached to a custom FBO in OpenGL. I expected to see a "foggy"-looking, greyscale output - the problem is that the output seems to be the same as the color texture - I mean, it's the exact same color texture. Am I actually rendering the same thing into 2 different textures?
Creating the depth texture:
glGenTextures(1, &m_DepthTxId);
glBindTexture(GL_TEXTURE_2D, m_DepthTxId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, width, height, 0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, (void*)nullptr);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Creating the fbo & attaching the color/depth textures:
glGenFramebuffers(1, &m_Id);
glBindFramebuffer(GL_FRAMEBUFFER, m_Id);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, m_ColorTxId, 0);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, m_DepthTxId, 0);
m_DrawBuffers[0] = GL_COLOR_ATTACHMENT0;
glDrawBuffers(1, m_DrawBuffers);
The interesting thing is that without the depth texture attached, the objects are displayed in the order they were rendered (the scene looks messed up) - but when I add the depth texture, everything looks fine, as you would expect with depth testing.
But how come, when I give the depth texture to the shader, it just displays the color texture? Am I rendering color data into my depth texture, or..?
Apologies, but I'm fairly new to working with FBOs :P
Additional info:
Rendering the fbo-textures onto a quad:
glActiveTexture(GL_TEXTURE0); // Color
glBindTexture(GL_TEXTURE_2D, m_Fbo->getColorTxId()); // ColorTexture ID
glUniform1i(m_Shader->getColorSampler(), 0); // ColorSampler Location
glActiveTexture(GL_TEXTURE1); // Depth
glBindTexture(GL_TEXTURE_2D, m_Fbo->getDepthTxId()); // DepthTexture ID
glUniform1i(m_Shader->getDepthSampler(), 0); // DepthSampler Location
glBindVertexArray(m_Fbo->getVaoId());
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (void*)0);
glBindVertexArray(0);
Getting the sampler locations from the shader:
m_Location_ColorTxSampler = glGetUniformLocation(m_ProgramId, "colorSampler");
m_Location_DepthTxSampler = glGetUniformLocation(m_ProgramId, "depthSampler");
Shader:
in vec2 uv;
uniform sampler2D colorSampler;
uniform sampler2D depthSampler;
out vec4 color;
void main()
{
    color = vec4(texture2D(depthSampler, uv).rgb, 1.0);
}
To me the whole thing seems correct, unless the 2 samplers are at the same location... I'm sure that's not possible.
If you bind the depth texture to unit 1...
glActiveTexture(GL_TEXTURE1); // Depth
glBindTexture(GL_TEXTURE_2D, m_Fbo->getDepthTxId()); // DepthTexture ID
... you should not tell the shader to sample from unit 0:
glUniform1i(m_Shader->getDepthSampler(), 0); // DepthSampler Location
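The fix is to make the uniform match the unit the depth texture is actually bound to (the asker's own call with the unit index corrected):
glUniform1i(m_Shader->getDepthSampler(), 1); // DepthSampler -> texture unit 1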

OpenGL - Unable to write to frame buffer (or read it properly)

So I've been trying to get shadow mapping to work, but I was unsuccessful, and I am now trying to simply write anything to the frame buffer and then render it to a quad as a texture. I've been looking at this small piece of code for 10 hours, so I thought it might finally be time to ask for some help. Do let me know if you need any more information or if something is unclear.
I started by following this tutorial. After completing it two times, and straight copy-pasting a third time, I gave up and started to look elsewhere for information. My current code is a bit of a mess, but I have tried to extract what is crucial.
Here is how I set up my FBO with a TEXTURE2D (_shadowMap and _shadowMapFBO are private variables in the class acting as the main program):
glGenTextures(1, &_shadowMap);
glBindTexture(GL_TEXTURE_2D, _shadowMap);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT32, SHADOW_WIDTH, SHADOW_HEIGHT, 0, GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glGenFramebuffers(1, &_shadowMapFBO);
glBindFramebuffer(GL_FRAMEBUFFER, _shadowMapFBO);
glDrawBuffer(GL_NONE);
glReadBuffer(GL_NONE);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_2D, _shadowMap, 0);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
{
    std::cout << "Frame buffer failed" << std::endl;
    system("pause");
    exit(1);
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
I then do the first render pass in the main loop:
glViewport(0, 0, SHADOW_WIDTH, SHADOW_HEIGHT);
glBindFramebuffer(GL_FRAMEBUFFER, _shadowMapFBO);
glClear(GL_DEPTH_BUFFER_BIT);
_shadowShader.bind();
glBindVertexArray(_cubeVAO);
glDrawElements(GL_TRIANGLES, _numberOfIndicesCube, GL_UNSIGNED_SHORT, 0);
glBindVertexArray(_planeVAO);
glDrawElements(GL_TRIANGLES, _numberOfIndicesPlane, GL_UNSIGNED_SHORT, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glDrawBuffer(GL_BACK);
glReadBuffer(GL_BACK);
Initially I wanted to create a shadow map, but at this point I'm simply trying to write an explicit value to the entire texture attached to the frame buffer, using the following shader pair (the one used by _shadowShader):
Vertex
#version 450
in layout(location=0) vec3 position;
uniform mat4 mvp;
void main()
{
    gl_Position = mvp * vec4(position, 1.0f);
}
Fragment
#version 450
out float depth;
void main()
{
    depth = 0.0f;
}
I then finish up by trying to display the texture on a quad:
glViewport(0, 0, _screenWidth, _screenHeight);
glClearColor(0.7f, 0.5f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
_testShader.bind();
glBindTexture(GL_TEXTURE_2D, _shadowMap);
glBindVertexArray(_quadVAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
Here is the fragment shader for the quad:
#version 450
in vec2 textureCoordinates;
uniform sampler2D _shadowMap;
out vec4 color;
void main()
{
    float depth = texture2D(_shadowMap, textureCoordinates).r;
    color = texture2D(_shadowMap, textureCoordinates);
}
But to my continuing frustration, the quad appears white although I output 0.0f (black) from the fragment shader... I know the quad is able to display a texture, as I've been able to render a normal texture displaying a .jpg onto it. So this is where I'm at now; any ideas, pointers, thoughts, motivational words, etc.?
EDIT I: So after some more time I have at least figured out that the texture is loaded correctly and that I'm able to read from it. I did this by actually loading an image onto it during initialization and then not writing to it. The quad then displays the texture correctly. So there must be something wrong with how I'm writing to it.
EDIT II: So I got it working; I sat down with a TA and copy-pasted the code (yeah, I know... doesn't get much better than that) and it now works. Thanks Bart :)
Your shadow pass fragment shader is just wrong:
#version 450
out float depth;
void main()
{
    depth = 0.0f;
}
This just declares a single-channel color output, which will be written to the color attachment (which you don't have). Since you want to write to the depth attachment of your FBO, you have to use the builtin gl_FragDepth output variable. If you don't, the GL will write the linearly interpolated window-space depth value to the depth buffer.
Note that typically you clear the depth buffer to 1.0, and when using a typical perspective projection your objects would end up very close to 1.0, say at 0.99something, as the depth value, so it is very likely that converting the result to just 8-bit grayscale will look all white.
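A minimal sketch of the corrected depth-pass fragment shader, following that advice (no color output at all; the explicit value goes to the builtin depth output):
#version 450
void main()
{
    gl_FragDepth = 0.0f; // written to the FBO's depth attachment
}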

Issue with using Sampler2D with a Shader

! I am aware that there are easier ways to draw to the screen than this, but I need to do it this specific way !
I am drawing to a texture through an FBO.
I am then using a shader to re-draw it onto the screen, with a sampler2D (of the texture), having the shader set gl_FragColor to the color of the sampler2D at the specific point the fragment shader is at.
The issue I am having is that the display buffer (which the shader is drawing the FBO to) is only a solid blue, even though I draw a white square on a blue background to the FBO.
My Code:
Main Render Loop:
while(!Display.isCloseRequested()){
//Drawing on the fbo a blue background and white square
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboID);
glColor3f(0,0,1);
drawQuad(0,0,WIDTH,HEIGHT);
glColor3f(1,1,1);
drawQuad(0,0,50,50);
//Trying to draw the texture from the previous fbo
//on to the display buffer with a fragment shader,
//however it only draws a blue background, no white square.
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
GL13.glActiveTexture(GL13.GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D,texID);
glUseProgram(shaderProgram);
glUniform1i(glGetUniformLocation(shaderProgram, "tex"), 0);
glUniform1f(glGetUniformLocation(shaderProgram, "width"), WIDTH);
glUniform1f(glGetUniformLocation(shaderProgram, "height"), HEIGHT);
glBegin(GL_QUADS); {
    glVertex2f(0, 0);
    glVertex2f(0, HEIGHT);
    glVertex2f(WIDTH, HEIGHT);
    glVertex2f(WIDTH, 0);
} glEnd();
glUseProgram(0);
Display.update();
}
My Shader:
uniform sampler2D tex;
uniform float width;
uniform float height;
void main() {
    vec4 color = texture2D(tex, gl_FragCoord.xy / vec2(width, height));
    gl_FragColor = color;
}
Init method for the fbo:
texID=glGenTextures();
fboID=glGenFramebuffersEXT();
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, fboID);
glBindTexture(GL_TEXTURE_2D, texID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, WIDTH, HEIGHT, 0,GL_RGBA, GL_INT, (java.nio.ByteBuffer) null);
glFramebufferTexture2DEXT(GL_FRAMEBUFFER_EXT,GL_COLOR_ATTACHMENT0_EXT,GL_TEXTURE_2D, texID, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glBindFramebufferEXT(GL_FRAMEBUFFER_EXT, 0);
I think one reason this could be happening is that I am not correctly passing the sampler2D to the shader, but I'm not sure how to fix it.
If anyone could tell me why this is happening or how to fix it (or even just point me in the right direction), it would be much appreciated!
(Sorry for any bad English!)
You have transposed your calls to glUniform1i (...) and glUseProgram (...).
At the time that glUniform1i(glGetUniformLocation(shaderProgram, "tex"), 0); is executed, the currently active program is 0 and that should in fact be generating the following error:
GL_INVALID_OPERATION is generated if there is no current program object.
If you swap the two lines mentioned above, that should fix your problem. In the future you should make a point of checking glGetError (...) when something does not work.
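With the two calls swapped, that part of the render loop becomes (the asker's own lines, reordered):
glUseProgram(shaderProgram);
glUniform1i(glGetUniformLocation(shaderProgram, "tex"), 0);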
UPDATE: Now that the original problem is fixed, you are not setting the texture minification filter correctly.
To set enumerants, you must use glTexParameteri (...). glTexParameterf (...) is going to interpret the value passed as a floating-point number, and GL_LINEAR (0x2601) has type GLenum (32-bit unsigned integer). Fortunately, all core OpenGL enums only use the lower 16 bits, so they can be expressed precisely as 32-bit floats (which can represent all integers up to 2^24), but you can see why you would not want to convert an integer constant to a float.
Therefore, the following line:
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
Needs to be:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
UPDATE 2: You must use normalized texture coordinates when using texture2D (...).
gl_FragCoord.xy has the range [0, width] and [0, height], which is outside the normalized range for any framebuffer larger than 1x1. To fix that, you must divide by width and height.
texelFetch (...) allows you to use unnormalized coordinates, but it requires GLSL 1.30 and you would have to cast gl_FragCoord.xy to an ivec2 to use it. It may also prove inconvenient if you ever draw into an FBO that has different dimensions than your window.
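For reference, a minimal sketch of the texelFetch variant under those constraints (GLSL 1.30, integer texel coordinates; fragColor is a hypothetical output name):
#version 130
uniform sampler2D tex;
out vec4 fragColor;
void main() {
    // texelFetch takes unnormalized integer texel coordinates plus a mip level
    fragColor = texelFetch(tex, ivec2(gl_FragCoord.xy), 0);
}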

OpenGL - texelFetch fetches nothing

I'm trying to draw a textured plane following the OpenGL SuperBible, 6th ed., but for some reason I fail.
Here's my texture initialization code.
GLuint texture;
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
int w = 256;
int h = 256;
glBindTexture(GL_TEXTURE_2D, texture);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA32F, w, h);
float * data = new float[w * h * 4];
//This just creates some image data
generateTexture(data, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
delete [] data;
This is the plane object. The object itself is drawn, just untextured.
glGenBuffers(1, &planeBuffer);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
glBufferData(GL_ARRAY_BUFFER,
             sizeof(planePositions),
             planePositions,
             GL_STATIC_DRAW);
These are my vertex and fragment shaders.
#version 430 core
layout (location = 0) in vec3 position;
uniform mat4 proj, view;
void main(void){
    gl_Position = proj * view * vec4(position, 1.0);
}
#version 430 core
uniform sampler2D s;
out vec4 frag_color;
void main () {
    frag_color = texelFetch(s, ivec2(gl_FragCoord.xy), 0);
}
I draw like this
glUseProgram(textureProgram);
GLuint projLocation = glGetUniformLocation (textureProgram, "proj");
glUniformMatrix4fv (projLocation, 1, GL_FALSE, projectionSource);
GLuint viewLocation = glGetUniformLocation (textureProgram, "view");
glUniformMatrix4fv (viewLocation, 1, GL_FALSE, viewSource);
glBindBuffer(GL_ARRAY_BUFFER, planeBuffer);
GLuint positionLocation = glGetAttribLocation(textureProgram, "position");
glVertexAttribPointer (positionLocation, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray (positionLocation);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
GLuint ts = glGetUniformLocation (textureProgram, "s");
glUniform1i(ts, 0);
glDrawArrays(GL_TRIANGLES, 0, 6);
glDisableVertexAttribArray (positionLocation);
//Afterwards I draw more geometry with other shaders. This shows correctly
glUseProgram(shaderProgram);
//Bind buffers, matrices, drawarrays, etc
But I just get a black, untextured plane. If I override the frag_color assignment by adding another line afterwards, like so:
frag_color = vec4(1.0);
it works, i.e. I get a white plane, so the shaders seem to be working correctly.
I don't get any errors whatsoever from glGetError().
Compatibility:
OpenGL version supported: 4.2.12337 Compatibility Profile Context 13.101
GLSL version supported: 4.30
The data array does contain values between 0 and 1. I have also tried hard-coding some random coordinates into the texelFetch() call, but I always get a black plane. It looks as though the sampler2D contained nothing but zeroes. I have also tried hard-coding the values contained in data to 1.0 or 255.0, but nothing.
Either the book fails to mention something or I am missing something stupid. Why does the texture refuse to show on the plane?
EDIT: I added some code to the drawing part. The rest of the geometry that I draw (with different shaders) shows perfectly. None of it uses textures.
I finally got my way around this, although it is not clear exactly what the problem is.
I got the texture to show by using glTexImage2D instead of glTexStorage2D and glTexSubImage2D.
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_FLOAT, data);
I also had to set the parameters explicitly, even though I'm using texelFetch().
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
Binding the uniform explicitly was not necessary.
What is strange is that, according to the docs, glTexStorage2D is equivalent to this:
for (i = 0; i < levels; i++) {
    glTexImage2D(target, i, internalformat, width, height, 0, format, type, NULL);
    width = max(1, (width / 2));
    height = max(1, (height / 2));
}
However, this combination does not work:
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA, w, h);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_FLOAT, data);
I still have to figure out why that is, but at least I got it to work. (A likely reason: unlike glTexImage2D, glTexStorage2D accepts only sized internal formats, so the unsized GL_RGBA here should generate GL_INVALID_ENUM and allocate nothing.)

OpenGL & GLSL: Greyscale texture not showing

I'm trying to show a greyscale texture on the screen. I create my texture via
glGenTextures(1, &heightMap);
glBindTexture(GL_TEXTURE_2D, heightMap);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32F, 512, 512, 0, GL_RED, GL_FLOAT, colorData);
colorData is a float[512*512] with values between 0.0 and 1.0.
When rendering, I use:
glBindTexture(GL_TEXTURE_2D, heightMap);
glUniform1i(shader.GetUniformLocation("textureSampler"), 0);
shader.GetUniformLocation is a function of a library we use at university. It is essentially the same as glGetUniformLocation(shader, "textureSampler"), so don't be confused by it.
I render two triangles via a triangle strip. My fragment shader is:
#version 330
layout(location = 0) out vec4 frag_color;
in vec2 texCoords;
uniform sampler2D textureSampler;
void main()
{
    frag_color = vec4(texture(textureSampler, texCoords).r, 0, 0, 1);
}
I know the triangles are rendered correctly (e.g. if I use vec4(1.0, 0, 0, 1) for frag_color, I get a completely red screen). However, with the line above, I only get a completely black screen. Every texture value seems to be 0.0.
Does anyone have an idea what I have done wrong? Are there mistakes in these few lines of code, or are they completely correct and the error is somewhere else?
As one of the comments below says, setting glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); and glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); solves the problem. :)
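For anyone wondering why those two lines matter: the texture above has only mip level 0, and the default minification filter (GL_NEAREST_MIPMAP_LINEAR) expects a complete mip chain, so the texture is incomplete and sampling it returns zero. Any non-mipmapped minification filter fixes that:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);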