When I upload my image data with glTexImage2D, it renders fine.
First, the code in the fragment shader:
uniform sampler2D tex;
in vec2 tex_coord;
out vec4 fragment_out;

// in main():
fragment_out = mix(
    texture(tex, tex_coord),
    vec4(tex_coord.x, tex_coord.y, 0.0, 1.0),
    0.5
);
Using this code, it draws the image:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height,
             0, GL_RGBA, GL_UNSIGNED_BYTE, image);
But if I upload it via glTexStorage2D and glTexSubImage2D, the texture is drawn just black.
glTexStorage2D(GL_TEXTURE_2D,
               1,
               GL_RGBA,
               width, height);
glTexSubImage2D(GL_TEXTURE_2D,
                0,
                0, 0,
                width, height,
                GL_RGBA,
                GL_UNSIGNED_BYTE,
                image);
When I replace GL_RGBA with GL_RGBA8 or GL_RGBA16 in the second code, the rendering is distorted.
This is the whole texture loading and binding code:
GLuint texture;
glGenTextures(1, &texture);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
int width, height;
unsigned char *image = SOIL_load_image(Resource::getPath("Images/2hehwdv.jpg").c_str(), &width, &height, 0, SOIL_LOAD_RGBA);
std::cout << SOIL_last_result() << std::endl;
std::cout << width << "x" << height << std::endl;
glTexStorage2D(GL_TEXTURE_2D,
               1,
               GL_RGBA,
               width, height);
glTexSubImage2D(GL_TEXTURE_2D,
                0,
                0, 0,
                width, height,
                GL_RGBA,
                GL_UNSIGNED_BYTE,
                image);
//glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
Can someone explain this behavior to me?
glTexStorage2D( GL_TEXTURE_2D, 1, GL_RGBA, width, height );
                                  ^^^^^^^ not sized
GL_RGBA is an unsized format.
glTexStorage2D()'s internalFormat parameter must be passed a sized internal format.
A glGetError() call after glTexStorage2D() would have returned GL_INVALID_ENUM:
GL_INVALID_ENUM is generated if internalformat is not a valid sized internal format.
Try a sized format such as GL_RGBA8 from the sized internal format table in the glTexStorage2D documentation.
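As a minimal sketch (not from the original answer; it reuses the width, height, and image variables from the question and keeps a single mip level), the corrected upload path would be:
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA8, width, height);   // sized internal format
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height,
                GL_RGBA, GL_UNSIGNED_BYTE, image);            // client-side format/type stay the same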
I set up a framebuffer texture to draw a scene onto, as follows:
glGenFramebuffers(1, &renderFBO);
glGenTextures(1, &renderTexture);
glBindTexture(GL_TEXTURE_2D, renderTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32F, W_WIDTH, W_HEIGHT, 0, GL_RGBA, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
int width, height;
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
cout << "width: " << width << " height: " << height << endl;
glBindFramebuffer(GL_FRAMEBUFFER, renderFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderTexture, 0);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
Checking width and height confirms that they do indeed have the correct dimensions, 1024 x 768.
Now I am trying to set up a second texture to be used as a counter, meaning I am trying to count the occurrences of every color value by incrementing the appropriate texel in the texture. Since I am trying to count the colors, I use a 2D texture of size 256 x 3. In each cell I want a single unsigned integer showing the number of times a specific color value has appeared up until that point. For instance, if the color (255,5,3) was encountered, I would want to increment the numbers inside tex[255][0], tex[5][0], tex[3][0].
num_bins = 256;
GLuint* hist_data = new GLuint[num_bins * color_components]();
glGenTextures(1, &tex_output);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, tex_output);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32UI, 256, 3, 0, GL_RED, GL_UNSIGNED_INT, hist_data);
glBindImageTexture(0, tex_output, 0, GL_FALSE, 0, GL_READ_WRITE, GL_R32UI);
int width, height;
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
cout << "width: " << width << " height: " << height << endl;
I used the sized and base internal format correspondence shown in the OpenGL specification. However, when this code executes, the output values are width=0, height=0. The input data doesn't seem to be the issue, because I tried providing NULL as the last argument and it still didn't work. What am I missing here?
EDIT:
The values in the texture are supposed to be incremented inside the following compute shader:
#version 450
layout(local_size_x = 1, local_size_y = 1) in;
layout(rgba32f, binding = 0) uniform image2D img_input;
layout(r32ui, binding = 1) uniform uimage2D img_output;
void main() {
    // grabbing pixel value from input image
    vec4 pixel_color = imageLoad(img_input, ivec2(gl_GlobalInvocationID.xy));
    vec3 rgb = round(pixel_color.rgb * 255);
    ivec2 r = ivec2(rgb.r, 0);
    ivec2 g = ivec2(rgb.g, 1);
    ivec2 b = ivec2(rgb.b, 2);
    uint inc = 1;
    imageAtomicAdd(img_output, r, inc);
    imageAtomicAdd(img_output, g, inc);
    imageAtomicAdd(img_output, b, inc);
}
You get a GL_INVALID_OPERATION error because the format argument doesn't match the internal format of the texture. For an unsigned integral internal format you have to use GL_RED_INTEGER instead of GL_RED (see glTexImage2D):
// generates GL_INVALID_OPERATION:
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32UI, 256, 3, 0, GL_RED, GL_UNSIGNED_INT, hist_data);
// correct:
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32UI, 256, 3, 0, GL_RED_INTEGER, GL_UNSIGNED_INT, hist_data);
I recommend using Debug Output; it saves a lot of time and lets you find such errors quickly.
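As an illustration (my own sketch, not part of the original answer; it assumes an OpenGL 4.3+ debug context), registering a debug callback looks roughly like this:
// sketch: register a debug message callback (requires GL 4.3 or KHR_debug)
void GLAPIENTRY DebugCallback(GLenum source, GLenum type, GLuint id, GLenum severity,
                              GLsizei length, const GLchar* message, const void* userParam)
{
    std::cerr << "GL debug: " << message << std::endl;
}

// once, after context creation:
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);   // errors are reported on the call that caused them
glDebugMessageCallback(DebugCallback, nullptr);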
I'm trying to load a PNG representing a color look up table, for my color grading shader.
Here is the Neutral LUT png representation:
I'm unsure how to properly load this as a 3D texture and then pass it into my shader. Clearly some color transformation is occurring, but I end up with a noisy rainbow mess rather than the neutral image. For example:
Here is how I create the texture:
glGenTextures(1, &lut_texture);
glBindTexture(GL_TEXTURE_3D, lut_texture);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB8, 32, 32, 32, 0, GL_RGB,
             GL_UNSIGNED_BYTE, &lut_image[0]);
Here is how I pass it into the shader:
glUseProgram(colorGradingProgram);
glUniform1f(glGetUniformLocation(colorGradingProgram, "tex0"), 0);
glUniform1f(glGetUniformLocation(colorGradingProgram, "lut"), 1);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, colorBuffers[0]);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_3D, lut_texture);
drawQuad();
glBindTexture(GL_TEXTURE_3D, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, 0);
glUseProgram(0);
Here is the frag shader (using GLSL 1.x):
uniform sampler2D tex0;
uniform sampler3D lut;
void main(void)
{
    vec4 colorIn = texture2D(tex0, gl_TexCoord[0].st);
    vec4 colorOut;
    colorOut.rgb = texture3D(lut, colorIn.rgb).rgb;
    gl_FragColor = colorOut;
}
What I'm most unclear about is the dimensions specified in the call to
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB8, 32, 32, 32, 0, GL_RGB,
             GL_UNSIGNED_BYTE, &lut_image[0]);
Depending on what values I use here, my results vary wildly. The closest I can get to a neutral image is with (2, 2, 2) as width, height, and depth, but that does not make intuitive sense to me and is still not totally neutral. I do not understand what I am doing wrong, or how translating the 2D texture into a 3D volume is supposed to be achieved.
Based on my educated guess from the comments above, you need to:
1. upload each texture layer separately, and
2. specify GL_RGBA as the format.
The following code should do that (disclaimer: untested):
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGB8, 32, 32, 32, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 1024);
for(int z = 0; z < 32; ++z) {
    glPixelStorei(GL_UNPACK_SKIP_PIXELS, 32*z);
    glTexSubImage3D(GL_TEXTURE_3D, 0, 0, 0, z, 32, 32, 1, GL_RGBA, GL_UNSIGNED_BYTE, &lut_image[0]);
}
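One addition of my own (not part of the original answer): the unpack settings above are global state, so it is usually worth restoring the defaults after the loop so that later texture uploads are unaffected:
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);    // back to the default: rows are tightly packed
glPixelStorei(GL_UNPACK_SKIP_PIXELS, 0);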
Please check the following image:
I cannot figure out why this is happening; it just doesn't make sense. I've checked it over and over, and it keeps showing the same thing: three copies of the same image on each side of the skybox, with red, green, and blue stripes going down them.
What am I doing wrong?
Vertex Shader:
#version 400
in vec3 position;
uniform mat4 mvp;
out vec3 tex;
void main(void) {
    gl_Position = mvp * vec4(position, 1.0);
    tex = position;
}
Fragment Shader:
#version 400
uniform samplerCube defuse;
in vec3 tex;
out vec4 out_Color;
void main(void) {
    out_Color = texture(defuse, tex);
}
CubeMap Loader
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_CUBE_MAP, texture);
int width, height, numComponents;
unsigned char* imageData = stbi_load((path.getURL() + "posx.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
imageData = stbi_load((path.getURL() + "posy.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
imageData = stbi_load((path.getURL() + "posz.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
imageData = stbi_load((path.getURL() + "negx.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
imageData = stbi_load((path.getURL() + "negy.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
imageData = stbi_load((path.getURL() + "negz.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_WRAP_R, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_BASE_LEVEL, 0);
glTexParameteri(GL_TEXTURE_CUBE_MAP, GL_TEXTURE_MAX_LEVEL, 0);
glBindTexture(GL_TEXTURE_CUBE_MAP, 0);
return new GLTexture(texture);
You tell stbi to load the images with 4 components (the required number of components is the last argument to stbi_load), but you tell OpenGL that the texture data is GL_RGB, which it is not. The fix is either to specify the texture data to OpenGL as GL_RGBA or to decode the images with 3 components.
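For example (a sketch, not taken from the original answer; it keeps the 4-component stbi_load call and uses the sized GL_RGBA8 internal format), the upload for one face would become:
imageData = stbi_load((path.getURL() + "posx.png").c_str(), &width, &height, &numComponents, 4);
glTexImage2D(GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GL_RGBA8, width, height, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, imageData);   // client data really is 4 components
stbi_image_free(imageData);
// ...and likewise for the other five faces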
I have read this tutorial: Array Texture, but I don't want to use the glTexStorage3D() function (it requires OpenGL 4.2). First of all, can someone check whether I have implemented this code properly (I'm using glTexImage3D instead of glTexStorage3D):
unsigned int nrTextures = 6;
GLsizei width = 256;
GLsizei height = 256;
GLuint arrayTextureID;
std::vector<unsigned char*> textures(nrTextures);
//textures: Load textures here...
glGenTextures(1, &arrayTextureID);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D_ARRAY, arrayTextureID);
//Gamma to linear color space for each texture.
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB, width, height, nrTextures, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
for(unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1, GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
/*glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);*/
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
/*glGenerateMipmap(GL_TEXTURE_2D_ARRAY);*/
glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
How do I implement mipmapping with this implementation?
This mostly looks ok. The only problem I can see is that GL_SRGB is not a valid internal texture format. So the glTexImage3D() call needs to use GL_SRGB8 instead:
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
For generating mipmaps, you can call:
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);
after you have filled the texture with data via the glTexSubImage3D() calls.
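Putting it together, a minimal sketch of the mipmapped variant (based on the question's code; un-commenting the mipmap lines and placing glGenerateMipmap after the uploads are the only changes, so treat it as illustrative rather than tested):
glTexImage3D(GL_TEXTURE_2D_ARRAY, 0, GL_SRGB8, width, height, nrTextures, 0,
             GL_RGB, GL_UNSIGNED_BYTE, NULL);
for(unsigned int i = 0; i < nrTextures; i++)
    glTexSubImage3D(GL_TEXTURE_2D_ARRAY, 0, 0, 0, i, width, height, 1,
                    GL_RGB, GL_UNSIGNED_BYTE, textures[i]);
glTexParameteri(GL_TEXTURE_2D_ARRAY, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D_ARRAY);   // builds the full mip chain for every layer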
I have a series of rectangles of different colours and I'm trying to add a texture to one of them. However when I apply the texture to the given rectangle, it just turns black. Below is the function I use to load the texture.
GLuint GLWidget::LoadTexture(const char * pic, int width, int height) {
    GLuint Texture;
    BYTE * data;
    FILE * picfile;
    picfile = fopen(pic, "rb");
    if (picfile == NULL)
        return 0;
    data = (BYTE *)malloc(width * height * 3);
    fread(data, width * height, 3, picfile);
    fclose(picfile);
    glGenTextures(1, &Texture);
    glBindTexture(GL_TEXTURE_2D, Texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
    return Texture;
}
In another function where the GL_QUADS are drawn, I then have...
GLuint myTex = LoadTexture("texture.bmp", 500, 500);
glEnable(GL_TEXTURE_2D);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glBindTexture(GL_TEXTURE_2D, myTex);
glBegin(GL_QUADS);
    glTexCoord2f(1, 1); glVertex3f(42, 10, 42);
    glTexCoord2f(1, 0); glVertex3f(42, 10, -42);
    glTexCoord2f(0, 0); glVertex3f(-42, 10, -42);
    glTexCoord2f(0, 1); glVertex3f(-42, 10, 42);
glEnd();
If anyone could let me know where I am going wrong that would be great, thanks!
This call
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB8, GL_UNSIGNED_BYTE, data);
is invalid. GL_RGB8 is a valid internalFormat, but it is not a valid enum for format. Use GL_RGB and GL_UNSIGNED_BYTE as format and type if your client-side data is 3 channels of 8-bit unsigned data per channel.
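With that change (and nothing else), the call would read:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);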
Another thing is
LoadTexture("texture.bmp", 500, 500);
This suggests that you are dealing with BMP files, but your loader reads the file as if it were completely raw image data.
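As a rough sketch of a minimal fix (my assumption, not part of the original answer: an uncompressed 24-bit BMP whose pixel data starts right after the 54-byte header; real BMPs also store rows bottom-up, in BGR order, padded to a multiple of 4 bytes per row):
// hypothetical sketch, not a full BMP parser
unsigned char header[54];
fread(header, 1, 54, picfile);                    // skip BITMAPFILEHEADER + BITMAPINFOHEADER
fread(data, 1, width * height * 3, picfile);      // pixel data: bottom-up rows, BGR order
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB8, width, height, 0,
             GL_BGR, GL_UNSIGNED_BYTE, data);     // GL_BGR matches the BMP channel order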