Texture isn't loading properly OpenGL - c++

I am using MS Visual Studio 2019 and am just learning OpenGL in C++. I started learning from https://learnopengl.com, which shows several ways to load textures. I used stb_image for loading, and here is the function for loading a texture:
static unsigned int loadTexture(char const* path)
{
    unsigned int textureID;
    glGenTextures(1, &textureID);

    int width, height, nrComponents;
    unsigned char* data = stbi_load(path, &width, &height, &nrComponents, 0);
    if (data)
    {
        GLenum format;
        if (nrComponents == 1)
            format = GL_RED;
        else if (nrComponents == 3)
            format = GL_RGB;
        else if (nrComponents == 4)
            format = GL_RGBA;

        glBindTexture(GL_TEXTURE_2D, textureID);
        glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, data);
        glGenerateMipmap(GL_TEXTURE_2D);

        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

        stbi_image_free(data);
    }
    else
    {
        std::cout << "Texture failed to load at path: " << path << std::endl;
        stbi_image_free(data);
    }
    return textureID;
}
I tried rendering it onto a rectangle, but somehow it doesn't display properly. I've been stuck for a couple of hours and still can't find what mistake I've made.
Here's the code for initializing the VAO and VBO:
inline void InitTextureBuffer(const char* img_filepath)
{
    texture_shader.Bind();
    tex_img_id = loadTexture(img_filepath);

    glGenVertexArrays(1, &tex_VAO);
    glGenBuffers(1, &tex_VBO);
    glBindVertexArray(tex_VAO);
    glBindBuffer(GL_ARRAY_BUFFER, tex_VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * 6 * 4, NULL, GL_DYNAMIC_DRAW);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), 0);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)(2 * sizeof(float)));

    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
    texture_shader.Unbind();
}
And the function to draw the rectangle:
inline void Draw_Rectangle_IMG(_Point _min, _Point _max)
{
    glActiveTexture(GL_TEXTURE0);
    glBindVertexArray(tex_VAO);
    texture_shader.Bind();

    float vertices[] =
    {
        _min.x, _min.y, 0.0f, 0.0f,
        _max.x, _min.y, 0.0f, 1.0f,
        _max.x, _max.y, 1.0f, 1.0f,
        _min.x, _min.y, 0.0f, 0.0f,
        _max.x, _max.y, 1.0f, 1.0f,
        _min.x, _max.y, 1.0f, 0.0f,
    };

    glBindTexture(GL_TEXTURE_2D, tex_img_id);
    glBindBuffer(GL_ARRAY_BUFFER, tex_VBO);
    glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(vertices), vertices);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    glDrawArrays(GL_TRIANGLES, 0, 6);

    glBindVertexArray(0);
    glBindTexture(GL_TEXTURE_2D, 0);
    texture_shader.Unbind();
}
The shaders I've used:
const std::string texture_shader_vs =
{
    "#version 330 core\n"
    "layout(location = 0) in vec2 aPos;\n"
    "layout(location = 1) in vec2 aTexCoord;\n"
    "\n"
    "out vec2 TexCoord;\n"
    "\n"
    "void main()\n"
    "{\n"
    " gl_Position = vec4(aPos,0.0f, 1.0f);\n"
    " TexCoord = aTexCoord;\n"
    "}\n"
};

const std::string texture_shader_fs =
{
    "#version 330 core\n"
    "out vec4 FragColor;\n"
    "\n"
    "in vec2 TexCoord;\n"
    "\n"
    "// texture sampler\n"
    "uniform sampler2D texture1;\n"
    "\n"
    "void main()\n"
    "{\n"
    " FragColor = texture(texture1, TexCoord);\n"
    "}\n"
};
And the function where I make the calls:
_Point p1, p2;
p1 = _Point(-0.5f, -0.25f);
p2 = _Point(0.5f, 0.25f);
ogl.InitTextureBuffer("resources/textures/1.png");

while (!glfwWindowShouldClose(ogl.GetWindow()))
{
    glClear(GL_COLOR_BUFFER_BIT);
    ogl.Draw_Rectangle_IMG(p1, p2);
    glfwSwapBuffers(ogl.GetWindow());
    glfwPollEvents();
}
The texture I tried to display is:
[texture image]
but the result I get is:
[rendered result]
I can't seem to find the error. Any help would be appreciated. Thanks!

The association of the texture coordinates to the vertices is wrong. Change to:
float vertices[] =
{
    _min.x, _min.y, 0.0f, 1.0f,
    _max.x, _min.y, 1.0f, 1.0f,
    _max.x, _max.y, 1.0f, 0.0f,
    _min.x, _min.y, 0.0f, 1.0f,
    _max.x, _max.y, 1.0f, 0.0f,
    _min.x, _max.y, 0.0f, 0.0f,
};
By default, OpenGL assumes that the start of each row of an image is aligned to 4 bytes, because the GL_UNPACK_ALIGNMENT parameter defaults to 4. When an RGB image with 3 color channels is loaded into a texture object and 3*width is not divisible by 4, this causes a misalignment.
Change the alignment by setting the GL_UNPACK_ALIGNMENT to 1, before specifying the texture image with glTexImage2D:
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, data);
If you remove glGenerateMipmap(GL_TEXTURE_2D);, then you also have to change the minification filter (GL_TEXTURE_MIN_FILTER). With GL_LINEAR_MIPMAP_LINEAR the texture would be mipmap incomplete, so in that case switch the minification filter to GL_NEAREST or GL_LINEAR.
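For reference, a minimal sketch of the corrected upload path with the alignment fix applied, keeping glGenerateMipmap so that GL_LINEAR_MIPMAP_LINEAR stays valid (variable names follow the question's loadTexture):

glBindTexture(GL_TEXTURE_2D, textureID);
// stb_image returns tightly packed rows, so drop the default 4-byte row alignment
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);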

Related

OpenGL sampling multiple textures returns the same texture color

main.cpp
#define GLFW_INCLUDE_NONE
#include <GLFW/glfw3.h>
#include <glad/glad.h>
#include <fstream>
#include <iostream>
#include <string>
#include <stdio.h>
#include <SOIL/SOIL.h>

typedef struct vertex {
    float x; float y; float z;
    float texX; float texY;
    float texIndex;
} vertex;
void readTextFile(const char* path, std::string* dst);

int main(){
    std::cout << 3 << std::endl;

    GLFWwindow* window;
    if(!glfwInit())
        return -1;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    window = glfwCreateWindow(640, 480, "Geometry Shader Bathing with Texture Atlas", NULL, NULL);
    if(!window){
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    gladLoadGL();
    glfwSwapInterval(1);

    glFrontFace(GL_CW);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

    //create and set vertex buffer
    vertex vertices[] = {
        -1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f,
        -1.0f,  0.0f, 0.0f, 0.0f, 0.0f, 0.0f,
         0.0f, -1.0f, 0.0f, 1.0f, 1.0f, 0.0f,
         0.0f, -1.0f, 0.0f, 1.0f, 1.0f, 0.0f,
        -1.0f,  0.0f, 0.0f, 0.0f, 0.0f, 0.0f,
         0.0f,  0.0f, 0.0f, 1.0f, 0.0f, 0.0f,

         0.0f,  0.0f, 0.0f, 0.0f, 1.0f, 1.0f,
         0.0f,  1.0f, 0.0f, 0.0f, 0.0f, 1.0f,
         1.0f,  0.0f, 0.0f, 1.0f, 1.0f, 1.0f,
         1.0f,  0.0f, 0.0f, 1.0f, 1.0f, 1.0f,
         0.0f,  1.0f, 0.0f, 0.0f, 0.0f, 1.0f,
         1.0f,  1.0f, 0.0f, 1.0f, 0.0f, 1.0f,
    };
    GLuint vertexBuffer;
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_DYNAMIC_DRAW);
    //create vertex shader
    std::string* shaderSource = new std::string();
    readTextFile("shader/vertexShader.glsl", shaderSource);
    const char* vertexShaderSourcePtr = shaderSource->c_str();
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderSourcePtr, NULL);
    glCompileShader(vertexShader);

    GLint status;
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
    if(status == GL_FALSE){
        char buffer[512];
        glGetShaderInfoLog(vertexShader, 512, NULL, buffer);
        std::cout << "vertexShader FAIL : " << std::endl
            << shaderSource << std::endl
            << buffer << std::endl;
    }else{
        std::cout << "vertexShader compile success!" << std::endl;
    }

    //create fragment shader
    readTextFile("shader/fragmentShader.glsl", shaderSource);
    const char* fragmentShaderSourcePtr = shaderSource->c_str();
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderSourcePtr, NULL);
    glCompileShader(fragmentShader);
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &status);
    if(status == GL_FALSE){
        char buffer[512];
        glGetShaderInfoLog(fragmentShader, 512, NULL, buffer);
        std::cout << "fragmentShader FAIL : " << std::endl
            << shaderSource << std::endl
            << buffer << std::endl;
    }else{
        std::cout << "fragmentShader compile success!" << std::endl;
    }
    delete shaderSource;
    //create and set program
    GLint program = glCreateProgram();
    glAttachShader(program, vertexShader);
    glAttachShader(program, fragmentShader);
    glBindAttribLocation(program, 0, "inPos");
    glBindAttribLocation(program, 1, "inTex");
    glBindAttribLocation(program, 2, "inTexIndex");
    glLinkProgram(program);
    //set frameBuffer(renderTarget) index and fragmentShader output name
    glBindFragDataLocation(program, 0, "outColor");
    glUseProgram(program);

    //create and set texture
    int width = 384;
    int height = 384;
    unsigned char* textureRawData0 = SOIL_load_image("resource/character/test0.png", &width, &height, 0, SOIL_LOAD_RGBA);
    unsigned char* textureRawData1 = SOIL_load_image("resource/character/test1.png", &width, &height, 0, SOIL_LOAD_RGBA);

    glUniform1i(glGetUniformLocation(program, "texture0"), 0);
    glUniform1i(glGetUniformLocation(program, "texture1"), 1);

    GLuint texture1;
    glGenTextures(1, &texture1);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texture1);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureRawData0);
    SOIL_free_image_data(textureRawData0);
    std::cout << "texture0: " << glGetUniformLocation(program, "texture0") << std::endl;
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    GLuint texture2;
    glGenTextures(1, &texture2);
    glActiveTexture(GL_TEXTURE0 + 1);
    glBindTexture(GL_TEXTURE_2D, texture2);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureRawData1);
    SOIL_free_image_data(textureRawData1);
    std::cout << "texture1: " << glGetUniformLocation(program, "texture1") << std::endl;
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    //input layout : vertex Attribute
    GLuint vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    glLinkProgram(program);

    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (void*)(3 * sizeof(float)));
    glEnableVertexAttribArray(2);
    glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, sizeof(vertex), (void*)(5 * sizeof(float)));

    while(!glfwWindowShouldClose(window)){
        int width, height;
        glfwGetFramebufferSize(window, &width, &height);
        glViewport(0, 0, width, height);

        glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glDrawArrays(GL_TRIANGLES, 0, 12);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwTerminate();
    return 0;
}
void readTextFile(const char* path, std::string* dst){
    std::ifstream f;
    f.open(path);
    if(f.is_open()){
        std::string buff;
        dst->clear();
        while(!f.eof()){
            std::getline(f, buff);
            dst->append(buff);
            dst->push_back('\n');
        }
        dst->push_back('\0');
        f.close();
    }else{
        std::cout << "Failed to open file: " << *dst << std::endl;
    }
}
Fragment shader (ps):
#version 330 core

in vec2 outTex;
in float outTexIndex;
out vec4 outColor;

uniform sampler2D texture0;
uniform sampler2D texture1;

void main(){
    if(outTexIndex == 0.0f){
        outColor = texture(texture0, outTex);
    }
    else if(outTexIndex == 1.0f){
        outColor = texture(texture1, outTex) + vec4(0.5f, 0.5f, 0.5f, 1.0f);
    }
}
The input layout (vertex attributes) works fine.
TexIndex is transferred correctly (1.0f and 0.0f).
But the problem is that

//fragment shader
texture(texture0, outTex);
texture(texture1, outTex);

both return the same texture color, even though test1.png and test2.png are actually different textures. What is the problem with my code?
++
I tried the fragment shader below, following @Rabbid76's answer:
#version 330 core

in vec2 outTex;
in float outTexIndex;
out vec4 outColor;

uniform sampler2D texture0;
uniform sampler2D texture1;

void main()
{
    vec4 c1 = texture(texture0, outTex) + vec4(0.5f, 0.0f, 0.0f, 1.0f);
    vec4 c2 = texture(texture1, outTex) + vec4(0.0f, 0.5f, 0.0f, 1.0f);
    outColor = mix(c1, c2, outTexIndex);
}
The colors are applied, but it is still sampling from the same texture. It looks like glActiveTexture(i) somehow doesn't work.
P.S. test1.png and test2.png are completely different pictures; the screenshot above is test1.png.
++ When I changed

glGenTextures(1, &texture0);
glGenTextures(1, &texture1);

to

glGenTextures(0, &texture0);
glGenTextures(0, &texture1);

then sampler2D texture0 and sampler2D texture1 both sample from texture1 (test1.png), not from texture0 (which is test0.png).
What is happening?
++
Vertex shader (vs):

#version 330

in vec3 inPos;
in vec2 inTex;
in float inTexIndex;

out vec2 outTex;
out float outTexIndex;

void main(){
    outTex = inTex;
    gl_Position = vec4(inPos, 1.0f);
    outTexIndex = inTexIndex;
}
Since the condition if(outTexIndex == 0.0f) depends on a fragment shader input, that may cause undefined behavior.
See (most recent) OpenGL Shading Language 4.60 Specification - 8.9. Texture Functions
[...] Some texture functions (non-“Lod” and non-“Grad” versions) may require implicit derivatives. Implicit derivatives are undefined within non-uniform control flow and for non-fragment shader texture fetches. [...]
See also: Non-uniform flow control.
Lookup both textures and mix the colors to solve the issue:
#version 330 core

in vec2 outTex;
in float outTexIndex;
out vec4 outColor;

uniform sampler2D texture0;
uniform sampler2D texture1;

void main()
{
    vec4 c1 = texture(texture0, outTex);
    vec4 c2 = texture(texture1, outTex) + vec4(0.5f, 0.5f, 0.5f, 1.0f);
    outColor = mix(c1, c2, outTexIndex);
}
I thought that if I had a working, compiled example of multiple textures, I could find the problem by comparing my code with the example line by line. So I found this one:
tutorial
example code
It was SDL based, so I changed the initialization code and the loop to GLFW, and it worked fine in my environment.
Even after I made my code's texture-loading part exactly the same as the example's, the problem wasn't solved. So the cause had to be somewhere else. Finally, I found this:
//input layout : vertex Attribute
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

//*****here!*****
//glLinkProgram(program);

glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(vertex), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, sizeof(vertex), (void*)(5 * sizeof(float)));
After I just added two characters, "//", the result was shown as I intended.
I'm not sure why that one line of code caused the problem.
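A likely explanation, though this is my assumption rather than something confirmed in the thread: linking a program resets its uniform values, so the glUniform1i calls made before the second glLinkProgram are lost and both samplers fall back to texture unit 0. A minimal sketch of an ordering that avoids this, using the names from the question's main.cpp:

// link exactly once, after the attribute locations are bound
glLinkProgram(program);
glUseProgram(program);

// set the sampler units only after the final link, so the values are not reset
glUniform1i(glGetUniformLocation(program, "texture0"), 0);
glUniform1i(glGetUniformLocation(program, "texture1"), 1);

// the VAO setup then no longer touches the program object
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertex), 0);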

Whenever I run my OpenGL Code I get a white screen and a crash

Whenever I run my OpenGL code, the window that I create pops up for a few seconds as a blank, white window and then immediately crashes. I get the error code: (process 15692) exited with code -1073741819.
I'm not sure what the problem is. I used some classes that I made, such as Shader, which just creates a program given two shaders. Here is the code below:
#pragma once
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
#include <vector>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#ifndef STBI_INCLUDE_STB_IMAGE_H
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
#endif
#include "Shader.h"

void processInput(GLFWwindow* window, Shader Program) {
    static glm::mat4 BasicMatrix;
    static glm::vec3 BasicVector;
    if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS) {
        glfwSetWindowShouldClose(window, true);
    }
    if (glfwGetKey(window, GLFW_KEY_W) == GLFW_PRESS) {
        BasicMatrix = glm::mat4(1.0f);
        BasicMatrix = translate(BasicMatrix, glm::vec3(0.05f, 0.0f, 0.0f));
        glUniformMatrix4fv(glGetUniformLocation(Program.ID, "Matrices"), 1, GL_FALSE, glm::value_ptr(BasicMatrix));
    }
}
void runTest() {
    if (!glfwInit()) {
        throw(-1);
        std::cout << "Error: GLFW Init Failed" << std::endl;
    }
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(1920, 1080, "HAHAH BRRR", NULL, NULL);
    if (window == NULL) {
        std::cout << "Failed to create GLFW Window \n";
        glfwTerminate();
        throw (-1);
    }
    glfwMakeContextCurrent(window);

    glewExperimental = true;
    if (glewInit() != GLEW_OK) {
        std::cout << "GLEW INIT FAILED\n";
        exit(1);
    }
    //The first two parameters set the position of the lower left corner
    glViewport(0, 0, 1920, 1080);

    float vertex[] = {
        -0.15f, -0.15f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f,
        -0.15f,  0.15f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f,
         0.15f,  0.15f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f,
        -0.15f,  0.15f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f
    };
    unsigned int vertices[] = {
        0, 1, 3,
        3, 1, 2
    };
    const char* vertexTexShader =
        "#version 330 core\n"
        "\n"
        "layout (location = 0) in vec3 aPos;\n"
        "layout (location = 1) in vec3 aColor;\n"
        "layout (location = 2) in vec2 TexCoords;\n"
        "\n"
        "out vec3 Color;\n"
        "out vec2 TexCoord;\n"
        "\n"
        "uniform mat4 Matrices;\n"
        "\n"
        "void main(){\n"
        "\n"
        "gl_Position = Matrices * vec4(aPos, 1.0f);\n"
        "Color = aColor;\n"
        "TexCoord = TexCoords;\n"
        "\n"
        "}\n";
    const char* fragmentTexShader =
        "#version 330 core\n"
        "\n"
        "in vec3 Color;\n"
        "in vec2 TexCoord;\n"
        "out vec4 FragColor;\n"
        "uniform sampler2D texture1;\n"
        "\n"
        "void main(){\n"
        "\n"
        "\n"
        "FragColor = texture(texture1, TexCoord) * Color;\n"
        "\n"
        "\n"
        "}\n";
    Shader GameProgram(vertexTexShader, fragmentTexShader, 1);
    unsigned int VAO4, VBO4, EBO4;
    glGenVertexArrays(1, &VAO4);
    glBindVertexArray(VAO4);

    glGenBuffers(1, &VBO4);
    glBindBuffer(GL_ARRAY_BUFFER, VBO4);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);

    glGenBuffers(GL_ELEMENT_ARRAY_BUFFER, &EBO4);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO4);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(0));
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
    glEnableVertexAttribArray(2);

    unsigned int Texture3;
    glGenTextures(1, &Texture3);
    glBindTexture(GL_TEXTURE_2D, Texture3);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

    int width, heigth, nrChannles;
    unsigned char* data = stbi_load("src/Images/awesomeface.png", &width, &heigth, &nrChannles, 4);
    if (data) {
        glTexImage2D(GL_TEXTURE_2D, 1, GL_RGB, width, heigth, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
        glGenerateMipmap(GL_TEXTURE_2D);
    }
    else {
        std::cout << "ERROR: Could Not Generate Texture\n REASON:";
        std::cout << stbi_failure_reason();
    }
    GameProgram.use();
    glUniform1i(glGetUniformLocation(GameProgram.ID, "texture1"), 0);

    while (glfwWindowShouldClose(window)) {
        processInput(window, GameProgram);
        glClearColor(0.5f, 0.2f, 0.6f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT);

        glEnable(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, Texture3);
        glBindVertexArray(VAO4);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glfwTerminate();
}
All the errors in your application can be easily found while debugging your program. Hence I voted to close your question.
Anyway here is a brief list of your mistakes:
The shader program fails to compile, because the type of the variable Color is vec3:

FragColor = texture(texture1, TexCoord) * Color;

has to be changed to:

FragColor = texture(texture1, TexCoord) * vec4(Color, 1.0);
Furthermore, there is a typo:

glGenBuffers(GL_ELEMENT_ARRAY_BUFFER, &EBO4);

which has to be:

glGenBuffers(1, &EBO4);
The 2nd argument (the mipmap level) of glTexImage2D must be 0:

glTexImage2D(GL_TEXTURE_2D, 1, GL_RGB, width, heigth, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);

has to be:

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, heigth, 0, GL_RGBA, GL_UNSIGNED_BYTE, &data);
Your application loop terminates immediately:

while (glfwWindowShouldClose(window)) {

has to be:

while (!glfwWindowShouldClose(window)) {
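Putting those fixes together, a sketch of the affected lines (assembled from the answer as an illustration, not verified against the full project; the texture call here also passes data rather than &data, since stbi_load already returns the pixel pointer, which is an extra adjustment beyond the list above):

// fragment shader: Color is a vec3, so widen it to a vec4
"FragColor = texture(texture1, TexCoord) * vec4(Color, 1.0);\n"

// element buffer: the first argument of glGenBuffers is a count, not a target
glGenBuffers(1, &EBO4);

// texture upload: mipmap level 0, and the pixel pointer itself
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, heigth, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);

// main loop: run until the window should close
while (!glfwWindowShouldClose(window)) {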

OpenGL glReadPixels returns 0

When I'm rendering to the on-screen buffer everything goes fine, but when reading pixels from a framebuffer with glReadPixels it always returns 0.
The pseudocode is the following:
Bind texture to FrameBuffer:
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);

int width, height;
width = 2;
height = 2;
float texture_data[] = {
    1.0f, 0.0f, 0.0f,   1.0f, 1.0f, 1.0f,
    1.0f, 1.0f, 1.0f,   1.0f, 0.0f, 0.0f
};
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_FLOAT, texture_data);

GLuint framebuffer;
glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);

GLint texture_coord_attribute = glGetAttribLocation(program, "texture_coord");
glEnableVertexAttribArray(texture_coord_attribute);
glVertexAttribPointer(texture_coord_attribute, 2, GL_FLOAT, GL_FALSE,
                      sizeof(vertices[0]), (void*)(sizeof(float) * 5));
Fragment/Vertex shaders:
static const char* vertex_shader_text =
    "#version 330\n"
    "attribute vec2 vPos;\n"
    "attribute vec2 texture_coord;\n"
    "varying vec2 Texcoord;\n"
    "void main()\n"
    "{\n"
    " gl_Position = vec4(vPos, 0.0, 1.0);\n"
    " Texcoord = texture_coord;\n"
    "}\n";

static const char* fragment_shader_text =
    "#version 330\n"
    "varying vec2 Texcoord;\n"
    "uniform sampler2D tex;\n"
    "void main()\n"
    "{\n"
    " gl_FragColor = texture(tex, Texcoord);\n"
    "}\n";
Read pixels in the main loop:

glViewport(0, 0, 720, 480);
glUseProgram(program);
glDrawArrays(GL_TRIANGLES, 0, 6);

GLubyte pixels[3] = {0};
glReadBuffer(GL_COLOR_ATTACHMENT0);
glReadPixels(360, 240, 1, 1, GL_RGB, GL_FLOAT, pixels);
// Any value returns 0, not only 360 and 240
printf("|%f||%f||%f|\n", pixels[0], pixels[1], pixels[2]);

glfwSwapBuffers(window);
glfwPollEvents();
This is the pipeline I follow. What is wrong here?
Thanks.
The 5th and 6th parameters (format and type) of glReadPixels specify the format and data type of the target pixel data.
Since you want to read into a buffer with the element data type GLubyte, the type has to be GL_UNSIGNED_BYTE.
Change your code like this:

glReadPixels(360, 240, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, pixels);
Or read the data into a buffer of type GLfloat:

GLfloat pixels[3];
glReadPixels(360, 240, 1, 1, GL_RGB, GL_FLOAT, pixels);

Note that what you currently do is read 12 bytes (sizeof(float) * 3) into a buffer with a size of 3 bytes (GLubyte pixels[3]). Only part of the floating-point value representing the red color channel fits into the buffer; the rest overwrites adjacent memory, which is an out-of-bounds write.
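For completeness, a small sketch of the GLubyte variant with a matching printf format specifier (the %u conversion is my addition; the question's printf uses %f on integer data):

GLubyte pixels[3] = {0};
glReadBuffer(GL_COLOR_ATTACHMENT0);
glReadPixels(360, 240, 1, 1, GL_RGB, GL_UNSIGNED_BYTE, pixels);
// GLubyte is promoted to an integer in varargs, so print it with %u rather than %f
printf("|%u||%u||%u|\n", pixels[0], pixels[1], pixels[2]);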

Switching more than one shader program (in OpenGL-ES 2.0)

I am really having a nightmare trying to achieve what I need in OpenGL ES 2.0.
Before posting the code, let me explain what I need: I have a 2D texture fragment shader, and on top of the texture I want to draw a red line. I am able to draw the line, but coloring it red is not working.
Shader declaration:
static const char s_v_shader[] =
    "attribute vec4 vPosition; \n"
    "attribute vec2 my_Texcoor; \n"
    "uniform mat4 u_TransMatrix; \n"
    "varying vec2 vTexcoor; \n"
    "void main() \n"
    "{ \n"
    " vTexcoor = my_Texcoor; \n"
    " gl_Position = u_TransMatrix*vPosition; \n"
    "} \n";

static const char s_f_shader[] =
    "precision mediump float;\n"
    "uniform sampler2D my_Sampler; \n"
    "varying vec2 vTexcoor; \n"
    "void main() \n"
    "{ \n"
    " vec4 tex = texture2D(my_Sampler, vTexcoor); \n"
    " gl_FragColor = tex; \n"
    "} \n";
On top of the texture I am rendering video frames from a camera in an infinite loop. Before rendering the video, I set up the coordinates of the 2D texture with the code below.
Now I will explain my code, starting from the main function:
main()
{
    const GLfloat vertices[][2] = {
        { -1.0f, -1.0f},
        {  1.0f, -1.0f},
        { -1.0f,  1.0f},
        {  1.0f,  1.0f}
    };
    const GLfloat texcoords[][2] = {
        { 0.0f, 1.0f},
        { 1.0f, 1.0f},
        { 0.0f, 0.0f},
        { 1.0f, 0.0f}
    };
    GLfloat transformMatrix[16] =
    {
        1.0f, 0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f, 0.0f,
        0.0f, 0.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 1.0f
    };

    // setup OpenGL environment......
    Setup_coordinates()
}
Setup_coordinates()
{
    LoadShaders(s_v_shader, s_f_shader);   // complete function defined below
    // By now I should be using the shader program.

    // Grab location of shader attributes.
    GLint locVertices = glGetAttribLocation(programHandle, "vPosition");
    GLint locTexcoord = glGetAttribLocation(programHandle, "my_Texcoor");
    // Transform Matrix is uniform for all vertices here.
    GLint locTransformMat = glGetUniformLocation(programHandle, "u_TransMatrix");
    GLint locSampler = glGetUniformLocation(programHandle, "my_Sampler");

    /* Create the texture. */
    glGenTextures(1, &gTexObj);
    glBindTexture(GL_TEXTURE_2D, gTexObj);
    if (gTexObj == 0)
    {
        printf("Could not load the texture \n");
        return -1;
    }
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glUniformMatrix4fv(locTransformMat, 1, GL_FALSE, transformMatrix);
    glUniform1i(locSampler, 0);

    glClearColor(0.0f, 0.5f, 0.0f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    while(1)   // infinite loop: render video frames on the 2D texture and draw the red line
    {
        // enable vertex arrays to push the data.
        glEnableVertexAttribArray(locVertices);
        glEnableVertexAttribArray(locTexcoord);

        // set data in the arrays.
        glVertexAttribPointer(locVertices, 2, GL_FLOAT, GL_FALSE, 0, &vertices[0][0]);
        glVertexAttribPointer(locTexcoord, 2, GL_FLOAT, GL_FALSE, 0, &texcoords[0][0]);

        // Video-frame rendering logic goes here...
        // Each frame of the video renders absolutely fine.
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        // Now comes the tricky part: draw the line and color it red.
        float red_left_1[] =
        {
            -0.85f, -0.9f, -0.6f, -0.5f,
        };
        glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, red_left_1);
        glEnableVertexAttribArray(1);
        glDrawArrays(GL_LINES, 0, 2);
        glLineWidth(width_test);
    }
}
void LoadShaders(const char * vShader, const char * pShader)
{
    vertShaderNum = glCreateShader(GL_VERTEX_SHADER);
    pixelShaderNum = glCreateShader(GL_FRAGMENT_SHADER);

    if (CompileShader(vShader, vertShaderNum) == 0)
    {
        printf("%d: PS compile failed.\n", __LINE__);
        return;
    }
    if (CompileShader(pShader, pixelShaderNum) == 0)
    {
        printf("%d: VS compile failed.\n", __LINE__);
        return;
    }

    programHandle = glCreateProgram();
    glAttachShader(programHandle, vertShaderNum);
    glAttachShader(programHandle, pixelShaderNum);

    // Bind vPosition to attribute 0
    glBindAttribLocation(programHandle, 0, "vPosition");

    glLinkProgram(programHandle);
    // Check if linking succeeded.
    GLint linked = 0;
    glGetProgramiv(programHandle, GL_LINK_STATUS, &linked);
    if (!linked)
    {
        printf("%d: Link failed.\n", __LINE__);
        // Retrieve error buffer size.
        GLint errorBufSize, errorLength;
        glGetShaderiv(programHandle, GL_INFO_LOG_LENGTH, &errorBufSize);
        char * infoLog = (char*)malloc(errorBufSize * sizeof (char) + 1);
        if (infoLog)
        {
            // Retrieve error.
            glGetProgramInfoLog(programHandle, errorBufSize, &errorLength, infoLog);
            infoLog[errorBufSize + 1] = '\0';
            fprintf(stderr, "%s", infoLog);
            free(infoLog);
        }
        return;
    }
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    glUseProgram(programHandle);
}
Most of the genius people suggested declaring one more fragment shader like the one above, but with uniform sampler2D my_Sampler replaced by uniform vec4 color:

void main()
{
    gl_FragColor = color;
}

and then switching between these shader programs with glUseProgram: one while showing the texture and the other while drawing the colored lines. I tried this and absolutely gave up, as switching to the shader program for drawing lines is not working.
Here is code for generating a colored 1x1 texture that you can use for your line (goes in your main or Setup_coordinates). With this solution you won't need another shader.
GLuint lineTexture;
glGenTextures(1, &lineTexture);
unsigned char red[4] = { 255, 0, 0, 255};
glBindTexture(GL_TEXTURE_2D, lineTexture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, red);
glBindTexture(GL_TEXTURE_2D, 0);
Before calling glDrawArrays, use this to switch to the correct texture.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, <lineTexture or gTexObj>);
glUniform1i(locSampler, 0);
A more general solution (the one I personally use in my OpenGL projects) is to create a white texture, add a color uniform to your shader, and multiply the uniform with the texture2D return value. With this method you can create differently colored lines and graphics from the same white texture, changing only the color uniform. For the video frames, you would send in a white color and the pixels would remain unchanged. This requires very few changes to your code; I'm sure you can figure it out if you think it sounds better. :)
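As an illustration of that last suggestion, here is a sketch of a fragment shader with a color uniform multiplied into the sampled texel (my own example rather than code from this thread; the uniform name u_Color is made up):

static const char s_f_shader_tinted[] =
    "precision mediump float;\n"
    "uniform sampler2D my_Sampler; \n"
    "uniform vec4 u_Color; \n"
    "varying vec2 vTexcoor; \n"
    "void main() \n"
    "{ \n"
    " vec4 tex = texture2D(my_Sampler, vTexcoor); \n"
    " gl_FragColor = tex * u_Color; \n"
    "} \n";

For the video frames you would set u_Color to vec4(1.0) so the texels pass through unchanged, and for the line you would bind the 1x1 white texture and set u_Color to vec4(1.0, 0.0, 0.0, 1.0).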

OpenGL. YUV painting

I want to draw a YUV image that I got from a JPEG file with the help of FFmpeg processing, using OpenGL. The original JPEG is 640x480 and the output YUV image does not have any strides. These are the shaders I use:
const GLchar* vertexSource =
    "in vec2 position;"
    "in vec2 texcoord;"
    "out vec2 Texcoord;"
    "void main() {"
    " Texcoord = texcoord;"
    " gl_Position = vec4(position, 0.0, 1.0); }";

const GLchar* fragmentSource = ""
    "uniform sampler2D y_tex;"
    "uniform sampler2D u_tex;"
    "uniform sampler2D v_tex;"
    "in vec2 Texcoord;"
    "layout( location = 0 ) out vec4 fragcolor;"
    ""
    "const vec3 R_cf = vec3(1.164383, 0.000000, 1.596027);"
    "const vec3 G_cf = vec3(1.164383, -0.391762, -0.812968);"
    "const vec3 B_cf = vec3(1.164383, 2.017232, 0.000000);"
    ""
    "void main() {"
    " float y = texture(y_tex, Texcoord).r;"
    " float u = texture(u_tex, Texcoord).r;"
    " float v = texture(v_tex, Texcoord).r;"
    " vec3 yuv = vec3(y,u,v);"
    " fragcolor = vec4(0.0, 0.0, 0.0, 1.0);"
    " fragcolor.r = dot(yuv, R_cf);"
    " fragcolor.g = dot(yuv, G_cf);"
    " fragcolor.b = dot(yuv, B_cf); }";
Below is the data preparation code:

GLuint elements[] = {
    0, 1, 2,
    2, 3, 0
};
static GLfloat vertices[] = {
    -0.5f,  0.5f, 0.0f, 0.0f, // Top-left
     0.5f,  0.5f, 1.0f, 0.0f, // Top-right
     0.5f, -0.5f, 1.0f, 1.0f, // Bottom-right
    -0.5f, -0.5f, 0.0f, 1.0f  // Bottom-left
};

GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

GLuint ebo;
glGenBuffers(1, &ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);

GLint m_posAttr = glGetAttribLocation(shaderProgram, "position");
GLint m_texAttr = glGetAttribLocation(shaderProgram, "texcoord");
glEnableVertexAttribArray(m_posAttr);
glEnableVertexAttribArray(m_texAttr);
glVertexAttribPointer(m_posAttr, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), 0);
glVertexAttribPointer(m_texAttr, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat),
                      (void*)(2 * sizeof(GLfloat)));
// FFMPEG processing inside
AVFrame* frame = decodeFrame("c:\\test2.jpg");

GLuint ytex;
glGenTextures(1, &ytex);
glActiveTexture(GL_TEXTURE0);
glUniform1i(glGetUniformLocation(shaderProgram, "y_tex"), 0);
glBindTexture(GL_TEXTURE_2D, ytex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frame->width, frame->height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, frame->data[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

GLuint utex;
glGenTextures(1, &utex);
glActiveTexture(GL_TEXTURE1);
glUniform1i(glGetUniformLocation(shaderProgram, "u_tex"), 1);
glBindTexture(GL_TEXTURE_2D, utex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frame->width / 2, frame->height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, frame->data[1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

GLuint vtex;
glGenTextures(1, &vtex);
glActiveTexture(GL_TEXTURE2);
glUniform1i(glGetUniformLocation(shaderProgram, "v_tex"), 2);
glBindTexture(GL_TEXTURE_2D, vtex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, frame->width / 2, frame->height / 2, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, frame->data[2]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
And a very simple draw cycle:

glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glfwSwapBuffers(window);

But all I get is a black screen. Can someone point out my mistakes?