I am facing an issue with font rendering. I am following the tutorial at https://learnopengl.com/In-Practice/Text-Rendering.
I am developing the application in OpenGL ES 3.
I have separate shader programs for 3D object drawing, for loading textures, and for font rendering.
I draw the fonts after the 3D objects and the texture have been drawn.
However, the font is not visible when I draw it with an orthographic projection after the other passes; I can only see the 3D objects and the texture (even though the shaders are different).
When I draw only the font with the orthographic projection, it renders perfectly.
I am sure it is an issue with the projection settings, but I am unable to figure it out.
Can anybody help, please?
Here is the code inside my render loop:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glViewport(0, 0, (GLsizei)SCR_WIDTH, (GLsizei)SCR_HEIGHT);
/********************************* 3D objects Rendering ********************************************************/
glUseProgram(shaderpgmPoints);
projection = glm::perspective(glm::radians(camspec.fov), (float)SCR_WIDTH / (float)SCR_HEIGHT, 0.1f, 100.0f);
glUniformMatrix4fv(glGetUniformLocation(shaderpgmPoints, "projection"), 1, false, &projection[0][0]);
view = glm::lookAt(camspec.cameraPos, camspec.cameraPos + camspec.cameraFront, camspec.cameraUp);
glUniformMatrix4fv(glGetUniformLocation(shaderpgmPoints, "view"), 1, false, &view[0][0]);
model = glm::mat4(1.0f);
glUniformMatrix4fv(glGetUniformLocation(shaderpgmPoints, "model"), 1, false, &model[0][0]);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, quadBufferObject);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)64);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindBuffer(GL_ARRAY_BUFFER, leftlaneBufferObject);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)192);
glEnable(GL_PROGRAM_POINT_SIZE);
glDrawArrays(GL_LINES, 0, 12);
glDisable(GL_BLEND);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindBuffer(GL_ARRAY_BUFFER, rightlaneBufferObject);
glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, (void*)32);
glLineWidth(10.0f);
glDrawArrays(GL_LINES, 0, 2);
glDisable(GL_BLEND);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glUseProgram(0);
/********************************* 3D objects Rendering -End ********************************************************/
/********************************* Texture Rendering ********************************************************/
glUseProgram(shaderpgmTexture);
glm::mat4 transform = glm::mat4(1.0f);
transform = glm::translate(transform, glm::vec3(0.0f, 0.0f, 0.0f));
glUniformMatrix4fv(glGetUniformLocation(shaderpgmTexture, "transform"), 1, false, &transform[0][0]);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glUniform1i(glGetUniformLocation(shaderpgmTexture, "outputTexture"), 1);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, camTexture);
glBindBuffer(GL_ARRAY_BUFFER, camTextureBufferObject);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// texture coord attribute
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
glDrawArrays(GL_QUADS, 0, 4);
glDisable(GL_BLEND);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glUseProgram(0);
/********************************* Texture Rendering - End ********************************************************/
/********************************* Font Rendering ********************************************************/
glDisable(GL_DEPTH_TEST);
glUseProgram(shaderpgmFont);
glEnable(GL_CULL_FACE);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glm::mat4 project = glm::ortho(0.0f, static_cast<GLfloat>(SCR_WIDTH), 0.0f, static_cast<GLfloat>(SCR_HEIGHT));
glUniformMatrix4fv(glGetUniformLocation(shaderpgmFont, "projection"), 1, false, &project[0][0]);
glUniform3f(glGetUniformLocation(shaderpgmFont, "textColor"), 1.0f, 0.0f, 0.0f);
glActiveTexture(GL_TEXTURE0);
GLfloat scale = 1.0f;
GLfloat x = 10.0f;
GLfloat y = 10.0f;
std::string text = "THE";
std::string::const_iterator c;
for (c = text.begin(); c != text.end(); c++)
{
Character ch = Characters[*c];
GLfloat xpos = x + ch.Bearing.x * scale;
GLfloat ypos = y - (ch.Size.y - ch.Bearing.y) * scale;
GLfloat w = ch.Size.x * scale;
GLfloat h = ch.Size.y * scale;
GLfloat vertices[6][4] = {
{ xpos, ypos + h, 0.0, 0.0 },
{ xpos, ypos, 0.0, 1.0 },
{ xpos + w, ypos, 1.0, 1.0 },
{ xpos, ypos + h, 0.0, 0.0 },
{ xpos + w, ypos, 1.0, 1.0 },
{ xpos + w, ypos + h, 1.0, 0.0 }
};
// Render glyph texture over quad
glBindTexture(GL_TEXTURE_2D, ch.TextureID);
// Update content of VBO memory
glBindBuffer(GL_ARRAY_BUFFER, FontVertexBufferObject);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(vertices), vertices); // Be sure to use glBufferSubData and not glBufferData
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Now advance cursors for next glyph (note that advance is number of 1/64 pixels)
x += (ch.Advance >> 6) * scale; // Bitshift by 6 to get value in pixels (2^6 = 64 (divide amount of 1/64th pixels by 64 to get amount of pixels))
}
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_BLEND);
glDisable(GL_CULL_FACE);
glUseProgram(0);
glEnable(GL_DEPTH_TEST);
/********************************* Font Rendering - End********************************************************/
I am initializing the font buffer like this:
glDisable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glUseProgram(shaderpgmFont);
// FreeType
FT_Library ft;
// All functions return a value different than 0 whenever an error occurred
if (FT_Init_FreeType(&ft))
std::cout << "ERROR::FREETYPE: Could not init FreeType Library" << std::endl;
// Load font as face
FT_Face face;
if (FT_New_Face(ft, "fonts/arial.ttf", 0, &face))
{
std::cout << "ERROR::FREETYPE: Failed to load font" << std::endl;
}
else
{
// Set size to load glyphs as
FT_Set_Pixel_Sizes(face, 0, 48);
// Disable byte-alignment restriction
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Load first 128 characters of ASCII set
for (GLubyte c = 0; c < 128; c++)
{
// Load character glyph
if (FT_Load_Char(face, c, FT_LOAD_RENDER))
{
std::cout << "ERROR::FREETYTPE: Failed to load Glyph" << std::endl;
continue;
}
// Generate texture
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(
GL_TEXTURE_2D,
0,
GL_ALPHA,
face->glyph->bitmap.width,
face->glyph->bitmap.rows,
0,
GL_ALPHA,
GL_UNSIGNED_BYTE,
face->glyph->bitmap.buffer
);
// Set texture options
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Now store character for later use
Character character = {
texture,
glm::ivec2(face->glyph->bitmap.width, face->glyph->bitmap.rows),
glm::ivec2(face->glyph->bitmap_left, face->glyph->bitmap_top),
face->glyph->advance.x
};
Characters.insert(std::pair<GLchar, Character>(c, character));
}
glBindTexture(GL_TEXTURE_2D, 0);
// Destroy FreeType once we're finished
FT_Done_Face(face);
FT_Done_FreeType(ft);
// Configure VAO/VBO for texture quads
glGenBuffers(1, &FontVertexBufferObject);
glBindBuffer(GL_ARRAY_BUFFER, FontVertexBufferObject);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * 6 * 4, NULL, GL_DYNAMIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), 0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
glUseProgram(0);
glDisable(GL_CULL_FACE);
glDisable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
In your initialization function you set up vertex attribute state via glVertexAttribPointer, but that state is later clobbered: the 3D-object and texture passes point attributes 0 and 1 at their own buffers, which is also why the text renders fine when it is drawn on its own.
To fix this, make the two glVertexAttribPointer calls in your text-rendering code every frame, right after glBindBuffer.
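A minimal sketch of the per-glyph loop body with the two calls restored, using the same layout as your initialization code (two vec2 attributes with a 4-float stride):
glBindBuffer(GL_ARRAY_BUFFER, FontVertexBufferObject);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(vertices), vertices);
// Re-point attributes 0 and 1 at the font VBO; the earlier passes
// left them pointing at their own buffers.
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 4 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);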
I am trying to add a texture onto my triangle, but the triangle just shows up black. Here is my starting function that gets called:
GLuint vao;
GLuint shader_programme;
extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API createSimpleWindow()
{
if (std::this_thread::get_id() == MAIN_THREAD_ID)
{
oldContext = glfwGetCurrentContext();
newContext = glfwCreateWindow(640, 480, "window", NULL, oldContext);
glfwMakeContextCurrent(newContext);
// start GLEW extension handler
glewExperimental = GL_TRUE;
glewInit();
glEnable(GL_DEPTH_TEST); // enable depth-testing
glDepthFunc(GL_LESS); // depth-testing interprets a smaller value as "closer"
//triangle pts
float points[] = {
0.0f, 0.5f, 0.0f, 0.5f, 1.0f,
0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f
};
GLuint vbo = 0;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), points, GL_STATIC_DRAW);
vao = 0;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
//glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 6 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vertex_shader, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fragment_shader, NULL);
glCompileShader(fs);
shader_programme = glCreateProgram();
glAttachShader(shader_programme, fs);
glAttachShader(shader_programme, vs);
glLinkProgram(shader_programme);
glUseProgram(shader_programme);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
int width, height, nrComponents;
unsigned char *data = stbi_load("/Users/roma/Desktop/Escape Tech/BitBucketRepos/blankpluginGLFW/BlankPlugin/PluginSource/source/container2.png", &width, &height, &nrComponents, 0);
writeToLog("before data");
if (data) {
writeToLog("data contained!!!!!!");
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
}
glUniform1i(glGetUniformLocation(shader_programme, "texture1"), 0);
//glfwCreateWindow(640, 480, "My Title", NULL, NULL);
glfwSetKeyCallback(newContext, key_callback);
}
else
{
writeToLog("not main thread");
}
}
Here is the function that gets called in a while loop:
extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API windowMainLoop()
{
writeToLog("render loop");
glfwMakeContextCurrent(newContext);
glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
// loop until the window closed
if (!glfwWindowShouldClose(newContext)) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if (std::this_thread::get_id() == MAIN_THREAD_ID)
{
// bind Texture
glBindTexture(GL_TEXTURE_2D, texture);
glUseProgram(shader_programme);
glBindVertexArray(vao);
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(newContext);
// poll the events
glfwPollEvents();
}
//switch back to old context
glfwMakeContextCurrent(oldContext);
writeToLog("finished render loop");
}
}
My "if (data)" statement results in the text begin written to the log so I know the image is being loaded properly but I cannot figure out why the triangle is black.
Any help would be greatly appreciated!
The issue is the setup of the arrays of generic vertex attribute data. Each attribute tuple consists of 5 components (x, y, z, u, v):
float points[] = {
0.0f, 0.5f, 0.0f, 0.5f, 1.0f,
0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f
};
So the stride parameter has to be 5 * sizeof(GLfloat) rather than 6 * sizeof(GLfloat):
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
The 2nd parameter of glBufferData is the size of the buffer in bytes. Your buffer consists of 15 (5 * 3) elements of type GLfloat, so the size is 15 * sizeof(float) rather than 9 * sizeof(float):
glBufferData(GL_ARRAY_BUFFER, 15 * sizeof(float), points, GL_STATIC_DRAW);
Since the file is a .png, the last parameter of stbi_load should be set to 4 to ensure that all 4 channels of the texture are read:
unsigned char *data = stbi_load("?.png", &width, &height, &nrComponents, 4);
The format and internal format passed to glTexImage2D then have to be GL_RGBA.
By default the texture minifying function (GL_TEXTURE_MIN_FILTER) is GL_NEAREST_MIPMAP_LINEAR (see glTexParameteri). Since you don't use mip mapping, the parameter has to be changed to GL_LINEAR:
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
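Put together, a minimal sketch of the corrected texture setup (path shortened, error handling elided, and stbi_image_free added to release the pixel data):
int width, height, nrComponents;
// Request 4 channels so the data matches the GL_RGBA format below
unsigned char *data = stbi_load("container2.png", &width, &height, &nrComponents, 4);
if (data) {
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    // No mipmaps are generated, so the minifying filter must not be a mipmap filter
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    stbi_image_free(data);
}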
I am trying to get the camera render texture from Unity and display it on the triangle in my OpenGL window. I can get a regular texture to show up, but the Unity one results in a black screen.
Here is the function that runs once:
extern "C" void UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API createSimpleWindow()
{
if (std::this_thread::get_id() == MAIN_THREAD_ID)
{
newContext = glfwCreateWindow(640, 480, "window", NULL, oldContext);
glfwMakeContextCurrent(newContext);
//drawing triangle
// start GLEW extension handler
glewExperimental = GL_TRUE;
glewInit();
// tell GL to only draw onto a pixel if the shape is closer to the viewer
glEnable(GL_DEPTH_TEST); // enable depth-testing
glDepthFunc(GL_LESS); // depth-testing interprets a smaller value as "closer"
//triangle pts
float points[] = {
0.0f, 0.5f, 0.0f, 0.5f, 1.0f,
0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f
};
GLuint vbo = 0;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, 15 * sizeof(float), points, GL_STATIC_DRAW);
vao = 0;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vertex_shader, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fragment_shader, NULL);
glCompileShader(fs);
shader_programme = glCreateProgram();
glAttachShader(shader_programme, fs);
glAttachShader(shader_programme, vs);
glLinkProgram(shader_programme);
glUseProgram(shader_programme);
////////////////////////////
//unity texture
void* textureDataPtr = s_CurrentAPI->BeginModifyTexture(g_TextureHandle, g_TextureWidth, g_TextureHeight, &textureRowPitch);
if (textureDataPtr) {
writeToLog("data contained from unity!!!!!!");
}
//regular texture
int width, height, nrComponents;
unsigned char *data = stbi_load("/Users/roma/Desktop/Escape Tech/BitBucketRepos/blankpluginGLFW/BlankPlugin/PluginSource/source/container2.png", &width, &height, &nrComponents, 4);
//set up texture
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
//glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, g_TextureWidth, g_TextureHeight, GL_RGBA, GL_UNSIGNED_BYTE, textureDataPtr);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, textureDataPtr);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glUniform1i(glGetUniformLocation(shader_programme, "texture1"), 0);
//s_CurrentAPI->EndModifyTexture(g_TextureHandle, g_TextureWidth, g_TextureHeight, textureRowPitch, textureDataPtr);
//////////////////////////////////
glfwSetKeyCallback(newContext, key_callback);
}
else
{
writeToLog("not main thread");
}
}
void* RenderAPI_OpenGLCoreES::BeginModifyTexture(void* textureHandle, int textureWidth, int textureHeight, int* outRowPitch)
{
const int rowPitch = textureWidth * 4;
// Just allocate a system memory buffer here for simplicity
unsigned char* data = new unsigned char[rowPitch * textureHeight];
*outRowPitch = rowPitch;
return data;
}
When I do a "if (textureDataPtr)" write to log it writes correctly so I know there is data it is just not showing up.
Any help would be greatly appreciated!
I am working on a project with an OpenGL part. The scene graph part is based on legacy OpenGL 1.0 and still uses glBegin and glEnd.
Now I am adding new functionality that has to use VAOs. I am a beginner and use QOpenGLFunctions_3_0 because I cannot find gl3.h in the OpenGL registry.
The problem is that when I use GL_TEXTURE_2D with glBegin(GL_QUADS), it works,
and when I draw a single glDrawElements(GL_QUADS, 4, GL_UNSIGNED_BYTE, 0), it works too.
But when I use GL_TEXTURE_2D together with glDrawElements(GL_QUADS, ...), the texture is not generated correctly. Please tell me the right way to use it.
Here is the code:
/** draws Texture */
class TextureNode
: public Node
{
public:
TextureNode( const cv::Mat& mat )
{
m_mat = mat;
}
TextureNode( const std::string& id, const cv::Mat& mat )
: Node( id )
{
m_mat = mat;
}
~TextureNode()
{
//glDeleteTextures( 1, &m_texture[0] );
}
void init( TraversalContext& context )
{
initializeOpenGLFunctions();
struct Vertex {
GLfloat position[3];
GLfloat texcoord[2];
GLfloat normal[3];
};
const int NUM_VERTS = 4;
const int NUM_INDICES = 4;
Vertex vertexdata[NUM_VERTS] = {
{{0, 0, 0}, {0,0}, {0,0,1}},
{{0, 100, 0}, {0,1}, {0,0,1}},
{{100, 100, 0}, {1,1}, {0,0,1}},
{{100, 0, 0}, {1,0}, {0,0,1}},
};
GLubyte indexdata[NUM_INDICES] = { 0, 1, 2, 3 };
// Create and bind a VAO
glGenVertexArrays(1, &m_quadVAO);
glBindVertexArray(m_quadVAO);
glGenBuffers(1, &m_quadPositionVBO);
glBindBuffer(GL_ARRAY_BUFFER, m_quadPositionVBO);
// copy data into the buffer object
glBufferData(GL_ARRAY_BUFFER, NUM_VERTS * sizeof(Vertex), vertexdata, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, position));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, texcoord));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, normal));
glGenBuffers(1, &m_quadIndexVBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_quadIndexVBO);
// copy data into the buffer object
glBufferData(GL_ELEMENT_ARRAY_BUFFER, NUM_INDICES * sizeof(GLubyte), indexdata, GL_STATIC_DRAW);
// Create and bind a texture
glGenTextures(1, &m_texture); // Create The Texture
glBindTexture(GL_TEXTURE_2D, m_texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_mat.cols, m_mat.rows, 0, GL_BGR, GL_UNSIGNED_BYTE, m_mat.data);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_CLAMP);
//////// At this point the VAO is set up with two vertex attributes
//////// referencing the same buffer object, and another buffer object
//////// as source for index data. We can now unbind the VAO, go do
//////// something else, and bind it again later when we want to render
//////// with it.
glBindTexture(GL_TEXTURE_2D, 0);
glBindVertexArray(NULL);
//////glBindBuffer(GL_ARRAY_BUFFER, NULL);
//glBindTexture( GL_TEXTURE_2D, NULL );
//////glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, NULL);
}
/** apply transformation */
void doWork( TraversalContext& context )
{
//QTime time;
//time.start();
initializeOpenGLFunctions();
glDisable( GL_LIGHTING );
glEnable(GL_TEXTURE_2D);
glBindVertexArray( m_quadVAO );
//glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glActiveTexture(GL_TEXTURE0 + m_texture - 1 );
glBindTexture(GL_TEXTURE_2D, m_texture);
glDrawElements( GL_QUADS, 4, GL_UNSIGNED_BYTE, 0);
//glBegin(GL_QUADS);
// glColor3d( 0,0,0 );
// glTexCoord2f(1.0f, 0.0f); glVertex2d( 0, 0 );
// glTexCoord2f(1.0f, 1.0f); glVertex2d( 0, m_mat.rows );
// glTexCoord2f(0.0f, 1.0f); glVertex2d( m_mat.cols, m_mat.rows );
// glTexCoord2f(0.0f, 0.0f); glVertex2d( m_mat.cols, 0 );
//glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
glBindVertexArray(0);
glEnable( GL_LIGHTING );
}
void destroy( TraversalContext& context )
{
glDeleteVertexArrays( 1, &m_quadVAO );
glDeleteTextures(1,&m_texture);
glDeleteBuffers(1, &m_quadPositionVBO);
glDeleteBuffers(1, &m_quadTexcoordVBO);
glDeleteBuffers(1, &m_quadIndexVBO);
}
protected:
GLuint m_quadVAO;
GLuint m_quadPositionVBO;
GLuint m_quadTexcoordVBO;
GLuint m_quadIndexVBO;
GLuint m_texture;
///** list of vertices */
GLfloat m_vertices[4][2];
cv::Mat m_mat;
};
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, position));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, texcoord));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (void*)offsetof(Vertex, normal));
This is using generic vertex attributes, but those only make sense when using shaders and, AFAIK, don't work with the fixed-function pipeline. Either create a shader that uses them, or use glVertexPointer, glTexCoordPointer, and/or glNormalPointer instead.
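A minimal sketch of the fixed-function variant, assuming the same interleaved Vertex struct and the m_quadPositionVBO from your init code:
glBindBuffer(GL_ARRAY_BUFFER, m_quadPositionVBO);
// Client-state arrays instead of generic vertex attributes
glEnableClientState(GL_VERTEX_ARRAY);
glVertexPointer(3, GL_FLOAT, sizeof(Vertex), (void*)offsetof(Vertex, position));
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glTexCoordPointer(2, GL_FLOAT, sizeof(Vertex), (void*)offsetof(Vertex, texcoord));
glEnableClientState(GL_NORMAL_ARRAY);
glNormalPointer(GL_FLOAT, sizeof(Vertex), (void*)offsetof(Vertex, normal));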
glActiveTexture(GL_TEXTURE0 + m_texture - 1 );
This is incorrect. glActiveTexture selects which texture unit to bind to; it does not use texture names at all. Since you're not using shaders, this should just be glActiveTexture(GL_TEXTURE0).
Side note: You don't need to use an index buffer for drawing a single quad; you can use glDrawArrays instead.
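Assuming the four vertices keep the order from vertexdata, the draw call would simply be:
glDrawArrays(GL_QUADS, 0, 4); // draws vertices 0..3 directly, no index buffer needed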
I have changed 3 GLfloat arrays (aka GLfloat[]) into vectors (aka std::vector&lt;GLfloat&gt;) and my triangles stopped working! Here is my init function:
void initialize(GLuint &vao) {
// Use a Vertex Array Object
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
Projection = perspective(45.0f, 16.0f / 9.0f, 0.1f, 100.0f);
//ortho(-4.0f/3.0f, 4.0f/3.0f, -1.0f, 1.0f, -1.0f, 1.0f);
Model = translate(Model, vec3(0.f, 0.f, 0.f));
Model = rotate(Model, 45.0f, vec3(0.0f, 0.0f, 1.0f));
//pos + textures array
GLuint vboId;
glGenBuffers(1, &vboId);
glBindBuffer(GL_ARRAY_BUFFER, vboId);
glBufferData(GL_ARRAY_BUFFER, (vertices_position.size() + texture_coord.size() ) * sizeof(GLfloat), NULL, GL_DYNAMIC_DRAW);
glBufferSubData(GL_ARRAY_BUFFER, 0, vertices_position.size() * sizeof(GLfloat), &vertices_position[0]);
glBufferSubData(GL_ARRAY_BUFFER, vertices_position.size() * sizeof(GLfloat), texture_coord.size() * sizeof(GLfloat), &texture_coord[0]);
//Indices
GLuint eabId;
glGenBuffers(1, &eabId);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eabId);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLfloat), &indices[0], GL_DYNAMIC_DRAW);
//Textures
GLuint textureId;
glGenTextures(1, &textureId);
glBindTexture(GL_TEXTURE_2D, textureId);
load_image("../squirrel.jpg");
shaderProgram = create_program("../shaders/vert.shader", "../shaders/frag.shader");
//Position attribute
positionAttrId = glGetAttribLocation(shaderProgram, "position");
glVertexAttribPointer(positionAttrId, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(positionAttrId);
//Texture attribute
textureCoordId = glGetAttribLocation(shaderProgram, "texture_coord");
glVertexAttribPointer(textureCoordId, 2, GL_FLOAT, GL_FALSE, 0, (GLvoid *)(vertices_position.size() * sizeof(GLfloat)));
glEnableVertexAttribArray(textureCoordId);
modelId = glGetUniformLocation(shaderProgram, "Model");
cameraId = glGetUniformLocation(shaderProgram, "Camera");
}
And below is my render function:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram);
glUniformMatrix4fv(modelId, 1, GL_FALSE, glm::value_ptr(Model));
glUniformMatrix4fv(cameraId, 1, GL_FALSE, glm::value_ptr(cam.matrix()));
//VAO
glBindVertexArray(vao);
//glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glUseProgram(0);
You are using floating-point elements in your Index Buffer Object, which is not a valid data type.
OpenGL requires either GLubyte (not hardware accelerated on most desktop GPUs), GLushort, or GLuint indices.
For best performance, use an array of GLushort and pass GL_UNSIGNED_SHORT as the element type in your call to glDrawElements (...).
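A minimal sketch, assuming indices is currently declared as a std::vector&lt;GLfloat&gt; (the index values shown are hypothetical):
// Use an integer element type for the index container
std::vector&lt;GLushort&gt; indices = { 0, 1, 2, 0, 2, 3 }; // hypothetical quad indices
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, eabId);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(GLushort), indices.data(), GL_DYNAMIC_DRAW);
// ...and match the element type in the draw call:
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);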