Get colour of a specific pixel in an OpenGL texture? - C++

I'm trying to get the colour of a specific pixel from a specific texture using OpenGL (C++). I've been looking at glGetTexImage(), since it looks like roughly what I want, but I can't figure out the context in which to call it. Am I on the wrong track? It doesn't need to be the fastest option, since it's not a frame-by-frame thing; it only happens when the game starts up.
The texture isn't going to be rendered to the screen and is just used as a way to get information. I use the following function to load the texture.
GLuint TextureUtil::loadTexture(const char* filename, int* widthVar, int* heightVar) {
    unsigned char* image = SOIL_load_image(filename, widthVar, heightVar, NULL, SOIL_LOAD_RGBA);
    GLuint texture;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    if (image) {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, *widthVar, *heightVar, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
        glGenerateMipmap(GL_TEXTURE_2D);
    } else {
        std::cout << "ERROR: TextureUtil.cpp - Texture loading failed." << std::endl;
    }
    glActiveTexture(GL_TEXTURE0); // was glActiveTexture(0): 0 is not a valid unit, units are GL_TEXTURE0 + n
    glBindTexture(GL_TEXTURE_2D, 0);
    SOIL_free_image_data(image);
    return texture;
}

Since you already have the decoded image in CPU memory at load time, the simplest approach is to read the value there, before (or instead of) uploading it. Assuming you are interested in the pixel at column x and row y:
unsigned char* image = SOIL_load_image(filename, widthVar, heightVar, NULL, SOIL_LOAD_RGBA);
int width = *widthVar;
// 4 bytes per pixel, because SOIL_LOAD_RGBA forces an RGBA layout
unsigned char* pixel = image + y * width * 4 + x * 4;
unsigned char red   = pixel[0];
unsigned char green = pixel[1];
unsigned char blue  = pixel[2];
unsigned char alpha = pixel[3];
Error checking of the SOIL_load_image call is left for you to add; I would fully expect it to return nullptr if the file didn't exist, for example.
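That said, if you would rather read the texel back out of the GL texture itself, glGetTexImage is indeed the right call; it needs a current GL context and it transfers the whole mip level, which is fine for a one-off at startup (note it does not exist in OpenGL ES). A minimal sketch, assuming texture, width and height come from the loader above:

#include <vector>

// Read back level 0 of the RGBA texture, then index one texel on the CPU.
std::vector<unsigned char> pixels(4 * width * height);
glBindTexture(GL_TEXTURE_2D, texture);
glPixelStorei(GL_PACK_ALIGNMENT, 1); // rows are tightly packed
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());
glBindTexture(GL_TEXTURE_2D, 0);

const unsigned char* texel = &pixels[(y * width + x) * 4];
unsigned char red = texel[0], green = texel[1], blue = texel[2], alpha = texel[3];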

Related

OpenGL 4.1 and lower Black Texture, Mac and Windows

I had this problem when compiling my OpenGL code on lower-end PCs that don't support OpenGL 4.5, and on Macs. In my regular code I would use functions like glCreateTextures and glTextureStorage2D, but they are not supported in those versions, so I went down the older glGenTextures path instead.
Here's the image generation code:
Texture::Texture(const std::string& path)
    : m_Path(path)
{
    int width, height, channels;
    stbi_set_flip_vertically_on_load(1);
    unsigned char* data = stbi_load(path.c_str(), &width, &height, &channels, 0);
    RW_CORE_ASSERT(data, "Failed to load image!");
    m_Width = width;
    m_Height = height;

    GLenum internalFormat = 0, dataFormat = 0;
    if (channels == 4)
    {
        internalFormat = GL_RGBA8;
        dataFormat = GL_RGBA;
    }
    else if (channels == 3)
    {
        internalFormat = GL_RGB8;
        dataFormat = GL_RGB;
    }
    m_InternalFormat = internalFormat;
    m_DataFormat = dataFormat;
    RW_CORE_ASSERT(internalFormat & dataFormat, "Format not supported!");

    glGenTextures(1, &m_ID);
    glBindTexture(GL_TEXTURE_2D, m_ID);
    glTexParameteri(m_ID, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(m_ID, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(m_ID, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexImage2D(m_ID, 1, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);
    glBindTexture(GL_TEXTURE_2D, 0);
    stbi_image_free(data);
}
And I want to bind my textures to specific slots on the GPU so I have this function:
void Texture::Bind(uint32_t slot) const
{
    glActiveTexture(GL_TEXTURE0 + slot);
    glBindTexture(GL_TEXTURE_2D, m_ID);
}
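Typical usage pairs the slot with a sampler uniform. A minimal sketch (shaderProgram and the uniform name "u_Texture" are placeholders, not part of the class above):

glUseProgram(shaderProgram);   // assumed to be a linked program with a sampler2D
texture.Bind(3);               // puts the texture on unit GL_TEXTURE3
glUniform1i(glGetUniformLocation(shaderProgram, "u_Texture"), 3); // sampler reads unit 3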
Here's the screenshot of what gets drawn:
To make sure it wasn't a rendering problem I decided to put it into ImGui's renderer.
And here's the picture I am supposed to get:
The image imports correctly and I get no errors; the same importing code and paths work on a higher-end PC. The only difference is that the higher-end PC uses the OpenGL 4.5 texture-generation code.
It turns out I had to pass GL_TEXTURE_2D in these places instead of the texture ID (these functions take a texture target, not a texture name), and the mipmap level passed to glTexImage2D has to be 0, not 1, so that the base level actually gets defined:
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, m_Width, m_Height, 0, dataFormat, GL_UNSIGNED_BYTE, data);
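For reference, the underlying rule: the pre-4.5 functions take a texture target and act on whatever texture is currently bound to that target, while the OpenGL 4.5 direct-state-access functions take the texture name itself. A side-by-side sketch, reusing the member names from the constructor above:

// Pre-4.5 path: bind first, then address the texture through its target.
glBindTexture(GL_TEXTURE_2D, m_ID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, internalFormat, m_Width, m_Height, 0,
             dataFormat, GL_UNSIGNED_BYTE, data); // mip level 0

// OpenGL 4.5 DSA path: the texture name is passed explicitly, no bind needed.
glCreateTextures(GL_TEXTURE_2D, 1, &m_ID);
glTextureParameteri(m_ID, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTextureStorage2D(m_ID, 1, internalFormat, m_Width, m_Height); // needs a sized format, e.g. GL_RGBA8
glTextureSubImage2D(m_ID, 0, 0, 0, m_Width, m_Height,
                    dataFormat, GL_UNSIGNED_BYTE, data);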

Which image format takes the least time to load into memory?

I have a requirement where I need to load a sequence of images into memory and then play them back to back.
I load all of the files into a std::vector, and once they have all loaded I play them.
This is the code for loading each file:
void loadTextureFromFile(const GLchar *file)
{
    // Create Texture object
    Texture2D texture;
    // Load image
    int width, height, channels;
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    unsigned char* image = SOIL_load_image(file, &width, &height, &channels, SOIL_LOAD_AUTO);
    // Set the internal format
    if (channels > 3)
    {
        texture.Internal_Format = GL_RGBA;
        texture.Image_Format = GL_RGBA;
    }
    else
    {
        texture.Internal_Format = GL_RGB;
        texture.Image_Format = GL_RGB;
    }
    // Now generate the texture
    texture.Generate(width, height, image);
    imageSequence.push_back(texture);
    // And finally free the image data
    SOIL_free_image_data(image);
}
Texture2D::Texture2D()
    : Width(0), Height(0), Internal_Format(GL_RGB), Image_Format(GL_RGB),
      Wrap_S(GL_REPEAT), Wrap_T(GL_REPEAT), Filter_Min(GL_LINEAR), Filter_Max(GL_LINEAR),
      WrapMode(SumTextureDecl::TextureWrapMode::Repeat)
{
    glGenTextures(1, &this->ID);
}

void Texture2D::Generate(GLuint width, GLuint height, unsigned char* data)
{
    float aniso = 0.0f;
    this->Width = width;
    this->Height = height;
    glBindTexture(GL_TEXTURE_2D, this->ID);
    // Query the maximum supported anisotropy level
    glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &aniso);
    // Create texture
    glTexImage2D(GL_TEXTURE_2D, 0, this->Internal_Format, width, height, 0, this->Image_Format, GL_UNSIGNED_BYTE, data);
    glGenerateMipmap(GL_TEXTURE_2D);
    // Set texture wrap and filter modes
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, this->Wrap_S);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, this->Wrap_T);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, aniso);
    // Unbind texture
    glBindTexture(GL_TEXTURE_2D, 0);
}
It takes some time to load all the files of the image sequence, so which file format would load the fastest?
I am using SOIL to load the images; would loading become faster with some other library?
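As a rule of thumb, the dominant cost is usually decoding rather than disk I/O: PNG has to run zlib decompression plus per-row unfiltering, JPEG an entropy decode plus inverse DCT, while TGA and BMP are close to a raw copy. The reliable way to decide is to measure on your own frames; a minimal timing sketch, with hypothetical file names:

#include <chrono>
#include <cstdio>
#include "SOIL.h"

// Time how long SOIL takes to decode a single file, in milliseconds.
double timeLoad(const char* file)
{
    int w, h, ch;
    auto t0 = std::chrono::steady_clock::now();
    unsigned char* img = SOIL_load_image(file, &w, &h, &ch, SOIL_LOAD_AUTO);
    auto t1 = std::chrono::steady_clock::now();
    SOIL_free_image_data(img);
    return std::chrono::duration<double, std::milli>(t1 - t0).count();
}

int main()
{
    std::printf("frame.tga: %.2f ms\n", timeLoad("frame.tga"));
    std::printf("frame.png: %.2f ms\n", timeLoad("frame.png"));
}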

How to properly load an image to use as an OpenGL texture?

I am trying to load an image into an OpenGL texture using SOIL2; however, it never seems to come out correctly unless I use SOIL2's load-to-texture function. I have tried using stb_image and DevIL as well, but both give similar results.
Code:
GLuint load_image(const std::string& path) {
    int iwidth, iheight, channels;
    unsigned char* image = SOIL_load_image(path.c_str(), &iwidth, &iheight, &channels, SOIL_LOAD_RGBA);
    // std::cout << SOIL_last_result() << std::endl;
    // float* image = stbi_loadf(path.c_str(), &iwidth, &iheight, &channels, STBI_rgb_alpha);
    // if(!ilLoadImage(path.c_str()))
    //     std::cout << "DevIL failed to load image: " << iluErrorString(ilGetError()) << std::endl;
    //
    // unsigned char* image = ilGetData();
    //
    // int iwidth = ilGetInteger(IL_IMAGE_WIDTH);
    // int iheight = ilGetInteger(IL_IMAGE_HEIGHT);
    // int channels = ilGetInteger(IL_IMAGE_CHANNELS);
    GLuint texture;
    glGenTextures(1, &texture);
    glActiveTexture(GL_TEXTURE0 + texture); // note: offsetting the unit by the texture name is fragile
    GLint old_unpack_alignment;
    glGetIntegerv(GL_UNPACK_ALIGNMENT, &old_unpack_alignment);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glBindTexture(GL_TEXTURE_2D, texture);
    glCheckError();
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glCheckError();
    GLenum original_format = (channels == 4 ? GL_RGBA : GL_RGB); // unused: SOIL_LOAD_RGBA already forces RGBA
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, iwidth, iheight, 0, GL_RGBA, GL_UNSIGNED_BYTE, image); // was width/height, which don't exist here
    glGenerateMipmap(GL_TEXTURE_2D);
    glPixelStorei(GL_UNPACK_ALIGNMENT, old_unpack_alignment);
    return texture;
}
Screenshot:
What I should get:
I would like to know how to properly load an image into a texture.
Here is an example of what my texture loading function looks like:
unsigned int loadTexture(char const * path)
{
    unsigned int textureID;
    glGenTextures(1, &textureID);

    int width, height, nrComponents;
    unsigned char *data = stbi_load(path, &width, &height, &nrComponents, 0);
    if (data)
    {
        GLenum format;
        if (nrComponents == 1)
            format = GL_RED;
        else if (nrComponents == 3)
            format = GL_RGB;
        else if (nrComponents == 4)
            format = GL_RGBA;

        glBindTexture(GL_TEXTURE_2D, textureID);
        glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, data);
        glGenerateMipmap(GL_TEXTURE_2D);

        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, format == GL_RGBA ? GL_CLAMP_TO_EDGE : GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, format == GL_RGBA ? GL_CLAMP_TO_EDGE : GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        stbi_image_free(data);
    }
    else
    {
        std::cout << "Texture failed to load at path: " << path << std::endl;
        stbi_image_free(data);
    }

    return textureID;
}
I will usually set up my VAO and VBO beforehand, then use this to load in a texture. After that I configure my shader(s) for use. Within the render loop I use the shader and set the matrices, passing in any needed uniforms; once all the "model" information is done, I bind the vertex array, set the appropriate texture unit active, bind the texture(s), and finish up by drawing the arrays or primitives, roughly as in the sketch below.
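A minimal sketch of that render loop (the Shader wrapper, uniform names, and vertex count are placeholders, not a fixed API):

// Per-frame draw: shader first, then uniforms, then VAO + texture, then draw.
shader.use();                            // hypothetical wrapper around glUseProgram
shader.setMat4("model", model);          // upload the matrices as uniforms
shader.setMat4("view", view);
shader.setMat4("projection", projection);

glBindVertexArray(VAO);
glActiveTexture(GL_TEXTURE0);            // select texture unit 0
glBindTexture(GL_TEXTURE_2D, textureID); // the texture returned by loadTexture above
glDrawArrays(GL_TRIANGLES, 0, 36);       // e.g. a cube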

SOIL - Getting Image Dimensions

I have decided to use SOIL to load images for use with OpenGL for my project. I have this method, which loads an image and returns a GLTexture, a struct that holds a GLuint texture id and two ints, width and height:
GLTexture loadTexture(const char *filePath) {
    GLTexture texture = {};
    int width;
    int height;
    unsigned char *data;
    //Load Image File Directly into an OpenGL Texture
    texture.id = SOIL_load_OGL_texture
    (
        filePath,
        SOIL_LOAD_AUTO,
        SOIL_CREATE_NEW_ID,
        SOIL_FLAG_MIPMAPS | SOIL_FLAG_INVERT_Y | SOIL_FLAG_NTSC_SAFE_RGB | SOIL_FLAG_COMPRESS_TO_DXT
    );
    //Error Checking (Load Process)
    if (texture.id == 0) {
        fatalError("SOIL Loading Error!");
    }
    //Generate and Bind Texture
    glGenTextures(1, &(texture.id));
    glBindTexture(GL_TEXTURE_2D, texture.id);
    //Get Width, Height and Data of Image
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
    glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glGenerateMipmap(GL_TEXTURE_2D);
    //Unbind Texture
    glBindTexture(GL_TEXTURE_2D, 0);
    //Return Texture
    texture.width = width;
    texture.height = height;
    return texture;
}
As far as I know, glGetTexLevelParameteriv() should store the width and height of the currently bound texture into width and height, but whenever I load an image it returns 0.
Should I fill in width and height as parameters of the method, or is it possible to get them via OpenGL?
The texture id generated by SOIL_load_OGL_texture is overwritten by the
glGenTextures(1, &(texture.id));
line (glGenTextures creates a new texture and stores its id in texture.id). All operations afterwards work on the newly created texture, and since this new texture is empty, its width and height are 0.
I'm not sure what you want to achieve here, but if you only want to load the texture, then this code might work:
texture.id = SOIL_load_OGL_texture (...);
//Error Checking (Load Process)
if (texture.id == 0) {
    fatalError("SOIL Loading Error!");
}
//Just bind and do not create a new texture
glBindTexture(GL_TEXTURE_2D, texture.id);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_WIDTH, &width);
glGetTexLevelParameteriv(GL_TEXTURE_2D, 0, GL_TEXTURE_HEIGHT, &height);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
//Unbind Texture
glBindTexture(GL_TEXTURE_2D, 0);
//Return Texture
texture.width = width;
texture.height = height;
return texture;
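One more fix worth noting: the original glGetTexImage call passed the address of an uninitialized pointer, which would write into memory that was never allocated. If you also want the pixel data back, allocate the destination buffer first, while the texture is still bound:

#include <vector>

// 4 bytes per texel for GL_RGBA; width and height were just queried above.
std::vector<unsigned char> pixels(static_cast<size_t>(width) * height * 4);
glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels.data());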

OpenGL fails at intersection of textures

I've loaded a texture, and displayed it on a cube and on a plane. At the point where the plane and the cube intersect, there are some ugly visual artifacts.
Here are two pictures demonstrating the issue:
Image 1 - what is this:
Image 2 - same scenario, other perspective:
Here is how I loaded the image:
static const GLenum gl_format[4] = { GL_LUMINANCE, GL_LUMINANCE_ALPHA, GL_BGR, GL_BGRA };

GLuint LoadTGATexture(const char* filename)
{
    // image is already loaded in: unsigned char[] data - int width - int height - int components
    unsigned int handle;
    float max_anisotropy = 0.0f; // declaration added; missing from the original snippet
    glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &max_anisotropy);
    glGenTextures(1, &handle);
    glBindTexture(GL_TEXTURE_2D, handle);
    glTexImage2D(GL_TEXTURE_2D, 0, components, width, height, 0, gl_format[components - 1], GL_UNSIGNED_BYTE, data);
    // gluBuild2DMipmaps re-defines level 0 as well, making the call above redundant
    gluBuild2DMipmaps(GL_TEXTURE_2D, components, width, height, gl_format[components - 1], GL_UNSIGNED_BYTE, data);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, max_anisotropy);
    glBindTexture(GL_TEXTURE_2D, 0);
    delete [] data;
    return handle;
}
But how can I fix it, and what is my mistake?
I would guess that it has nothing to do with your texture.
By the looks of it you have a depth-precision problem (z-fighting where the surfaces meet). I would verify that glEnable(GL_DEPTH_TEST) is called, and make sure that the near plane of your perspective matrix is > 0.0, meaning not 0.0 and not a negative number. Also make sure that your far plane isn't some insanely huge number; I normally stop at 1000.0 with good results. You might want to go even smaller.
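In code that boils down to enabling the depth test and picking sane planes when building the projection; the values below are just reasonable defaults:

// Depth precision is spread between the near and far planes, so a near
// plane at (or very close to) 0 makes z-fighting at intersections much worse.
glEnable(GL_DEPTH_TEST);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(60.0,                   // vertical field of view, in degrees
               width / (double)height, // window aspect ratio (variables assumed)
               0.1,                    // near plane: small, but strictly > 0
               1000.0);                // far plane: keep it modest
glMatrixMode(GL_MODELVIEW);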