Rotated GLSL texture is pixelated - c++

I've made a program that displays some textures and can rotate, resize, and move them depending on the given input. The program works fine, but when I rotate the textures their edges appear pixelated, like this:
Is there a way for me to smooth out these edges?
Here are my shaders and texture classes:
texture.vs:
#version 300 es
precision mediump float;
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
layout (location = 2) in vec2 aTexCoord;
out vec3 ourColor;
out vec2 TexCoord;
uniform mat4 transform;
void main()
{
gl_Position = transform * vec4(aPos, 1.0f);
ourColor = aColor;
TexCoord = vec2(aTexCoord.x, aTexCoord.y);
}
texture.fs:
#version 300 es
precision mediump float;
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
// texture sampler
uniform sampler2D texture1;
void main()
{
FragColor = texture(texture1, TexCoord);
}
Here is my texture.cpp:
Texture::Texture(int x, int y, int w, int h, int gw, int gh)
{
pos.TL_x = _VERTICIZE_X(x, gw);
pos.TL_y = -_VERTICIZE_Y(y, gh);
pos.TR_x = _VERTICIZE_X(x + w, gw);
pos.TR_y = -_VERTICIZE_Y(y, gh);
pos.BL_x = _VERTICIZE_X(x, gw);
pos.BL_y = -_VERTICIZE_Y(y + h, gh);
pos.BR_x = _VERTICIZE_X(x + w, gw);
pos.BR_y = -_VERTICIZE_Y(y + h, gh);
}
void set_max_z(int z)
{
max_z = z;
}
int Texture::init(float ang_rad, float z)
{
shader = Shader("./src/opengl/shaders/image.vs", "./src/opengl/shaders/image.fs");
this->angle = ang_rad;
// build and compile our shader program
// ------------------------------------
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
float vertices[32] = {
// positions // colors // texture coords
pos.TR_x, pos.TR_y, _VERTICIZE_Z(z, max_z), 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, // top right
pos.BR_x, pos.BR_y, _VERTICIZE_Z(z, max_z), 0.0f, 1.0f, 1.0f, 1.0f, 1.0f, // bottom right
pos.BL_x, pos.BL_y, _VERTICIZE_Z(z, max_z), 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, // bottom left
pos.TL_x, pos.TL_y, _VERTICIZE_Z(z, max_z), 1.0f, 1.0f, 1.0f, 0.0f, 0.0f // top left
};
unsigned int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
glEnable(GL_DEPTH_TEST);
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void *)0);
glEnableVertexAttribArray(0);
// color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void *)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
// texture coord attribute
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void *)(6 * sizeof(float)));
glEnableVertexAttribArray(2);
// load and create a texture
// -------------------------
// texture 1
// ----------
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
// tell OpenGL which texture unit each sampler belongs to (only has to be done once)
// -------------------------------------------------------------------------------------------
shader.use(); // don't forget to activate/use the shader before setting uniforms!
// either set it manually like so:
glUniform1i(glGetUniformLocation(shader.ID, "texture"), 0);
return 0;
}
void Texture::render(int w, int h, uint8_t *buffer)
{
// If having trouble doing multiple textures, add "+ index to Active Texture."
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
glGenerateMipmap(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glm::mat4 transform = glm::mat4(1.0f); // make sure to initialize matrix to identity matrix first
transform = glm::translate(transform, glm::vec3(0.0f, 0.0f, 0.0f));
transform = glm::rotate(transform, glm::radians(this->angle), glm::vec3(0.0f, 0.0f, 1.0f));
// get matrix's uniform location and set matrix
shader.use();
unsigned int transformLoc = glGetUniformLocation(shader.ID, "transform");
glUniformMatrix4fv(transformLoc, 1, GL_FALSE, glm::value_ptr(transform));
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
}
Texture::~Texture()
{
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
}
void Texture::framebuffer_size_callback(GLFWwindow *window, int width, int height)
{
// make sure the viewport matches the new window dimensions; note that width and
// height will be significantly larger than specified on retina displays.
glViewport(0, 0, width, height);
}
(_VERTICIZE just calculates points)
I'm using ubuntu, GLAD, GLFW.

This appears to be a classic example of aliasing. We can fix it by enabling MSAA (multisample anti-aliasing).
GLFW has a built-in way of doing that. Put this before you create the window:
glfwWindowHint(GLFW_SAMPLES, 4); /* if effect is not strong enough, change to 8 */
Edit -
You have to call glfwWindowHint(GLFW_SAMPLES, 4) in between glfwInit and glfwCreateWindow.
Also, in case OpenGL doesn't enable multisampling by default, do
glEnable(GL_MULTISAMPLE);
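Putting the pieces together, the setup could look roughly like this; a minimal sketch assuming a plain GLAD/GLFW program, where the window size and title are placeholders and error checking is omitted:
#include <glad/glad.h>
#include <GLFW/glfw3.h>
GLFWwindow *createMsaaWindow()
{
    glfwInit();
    // Request a 4x multisampled default framebuffer *before* the window is created.
    glfwWindowHint(GLFW_SAMPLES, 4);
    GLFWwindow *window = glfwCreateWindow(800, 600, "MSAA", nullptr, nullptr);
    glfwMakeContextCurrent(window);
    gladLoadGLLoader((GLADloadproc)glfwGetProcAddress);
    // Usually on by default when the framebuffer is multisampled, but harmless to force.
    glEnable(GL_MULTISAMPLE);
    return window;
}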

Related

STB: loading a PNG into an OpenGL texture results in a black or white background

Hello, I've got a weird problem with my code. I've been looking on Stack Overflow for some time and found no answer; there were a few similar problems, but they didn't solve anything.
int createTexture(const char* path) {
unsigned int texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
int width, height, nrChannels;
unsigned char* data = stbi_load(path, &width, &height, &nrChannels, 0);
int colorMode = nrChannels == 3 ? GL_RGB : GL_RGBA;
if (data) {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, colorMode, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
} else {
std::cout << "Failed to load texture" << std::endl;
}
stbi_image_free(data);
return texture;
}
Here I have the function that loads the texture; I choose GL_RGB or GL_RGBA depending on the number of channels. I've had good results with both 3 and 4 channels depending on the jpg/png image I use. Anyway, I get a white background (it was black before I edited the image in GIMP).
// set the texture wrapping/filtering options (on currently bound texture)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load and create a texture
// -------------------------
unsigned int texture1 = createTexture("brick.jpg");
unsigned int texture2 = createTexture("grafiti.png");
// Mode
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
// Shader Class
Shader currentShader("vertexShader.glsl", "fragmentShader.glsl");
currentShader.use();
currentShader.setInt("texture1", 0);
currentShader.setInt("texture2", 1);
while (!glfwWindowShouldClose(window)) {
processInput(window);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture1);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, texture2);
currentShader.use();
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, sizeof(indices) / sizeof(int), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
Then I use these textures together with these shaders:
Fragment
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
uniform sampler2D texture1;
uniform sampler2D texture2;
void main()
{
FragColor = mix(texture(texture1, TexCoord), texture(texture2, TexCoord), 1);
}
Vertex
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
layout (location = 2) in vec2 aTexCoord;
out vec3 ourColor;
out vec2 TexCoord;
void main()
{
gl_Position = vec4(aPos, 1.0);
ourColor = aColor;
TexCoord = aTexCoord;
}
float vertices[] = {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top left
};
unsigned int indices[] = {
0, 1, 3,
1, 2, 3,
};
unsigned int VBO;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
unsigned int VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
unsigned int EBO;
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
glEnableVertexAttribArray(2);
I'm sorry if my English is hideous, but it's the middle of the night and I've been struggling with this for hours now.
I found a solution. Enabling blending was not satisfying, so I wrote the fragment shader in a different way:
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
uniform sampler2D texture1;
uniform sampler2D texture2;
void main()
{
vec4 AlphaColor = texture(texture2, TexCoord);
if(AlphaColor.a < 0.1) {
FragColor = texture(texture1, TexCoord);
} else {
FragColor = mix(texture(texture1, TexCoord), AlphaColor, 0.9);
}
}
Now the fragment shader respects the alpha channel and only mixes in texels that are not transparent.
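A slightly shorter variant of the same idea is to feed the overlay texel's own alpha into mix; this is just a sketch of an alternative and has not been tested against the original scene:
void main()
{
    vec4 overlay = texture(texture2, TexCoord);
    // Transparent texels keep texture1, opaque texels show texture2,
    // and partial alpha blends smoothly between the two.
    FragColor = mix(texture(texture1, TexCoord), overlay, overlay.a);
}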

How to clip texture in OpenGL?

I need to crop an image. In OpenCV the crop coordinates are integers, which introduces an offset error, so I want to crop every single unit in the float domain instead. Maybe OpenGL can help me, but I'm not good at it.
The image looks like this (but it is not this exact one):
I want every black and white unit, and then to save each of them in a cv::Mat.
I guess a shader, a loop, or something else could be repeated to crop all of the blocks and then save them.
Right now I can get one single unit, like this:
glfwInit();//initalization
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow* window = glfwCreateWindow(100, 100, "LearnOpenGL", nullptr, nullptr);
if (window == nullptr)
{
cout << "Failed to create GLFW window" << endl;
glfwTerminate();
return false;
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return false;
}
Shader ourShader("vertex.vs.glsl", "fragment.fs.glsl");
float unitWidth = moduleLength / (float)tex.cols;
float startP_x = 100.0f / (float)tex.cols + unitWidth * 2;
float startP_y = 104.0f / (float)tex.rows + unitWidth * 2;
float vertices[] = {
// positions // texture coords (note that we changed them to 'zoom in' on our texture image)
0.5f, 0.5f, 0.0f, startP_x + unitWidth, startP_y, // top right
0.5f, -0.5f, 0.0f, startP_x + unitWidth, startP_y + unitWidth, // bottom right
-0.5f, -0.5f, 0.0f, startP_x, startP_y + unitWidth, // bottom left
-0.5f, 0.5f, 0.0f, startP_x, startP_y // top left
};
unsigned int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
unsigned int VBO, VAO, EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// texture coord attribute
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
// texture
// ---------
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
//// set the texture wrapping parameters
// set the texture wrapping parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
// set texture filtering parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
GLubyte* pixels;
int width = tex.cols;
int height = tex.rows;
int pixellength = width * height*tex.channels();
pixels = new GLubyte[pixellength];
memcpy(pixels, tex.data, pixellength * sizeof(unsigned char));
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_BGR, GL_UNSIGNED_BYTE, pixels);
delete[] pixels; // allocated with new[], so release with delete[] rather than free
while (!glfwWindowShouldClose(window))
{
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// bind Texture
glBindTexture(GL_TEXTURE_2D, texture);
// draw our first triangle
ourShader.use();
// create transformations
glm::mat4 model = glm::mat4(1.0f); // make sure to initialize matrix to identity matrix first
glm::mat4 view = glm::mat4(1.0f);
glm::mat4 projection = glm::mat4(1.0f);
model = glm::rotate(model, glm::radians(0.0f), glm::vec3(1.0f, 0.0f, 0.0f));
view = glm::translate(view, glm::vec3(0.0f, 0.0f, -1.1f));
projection = glm::perspective(glm::radians(45.0f), (float)SCR_WIDTH / (float)SCR_HEIGHT, 0.1f, 100.0f);
// retrieve the matrix uniform locations
unsigned int modelLoc = glGetUniformLocation(ourShader.ID, "model");
unsigned int viewLoc = glGetUniformLocation(ourShader.ID, "view");
unsigned int transformLoc = glGetUniformLocation(ourShader.ID, "projection");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, &view[0][0]);
glUniformMatrix4fv(transformLoc, 1, GL_FALSE, glm::value_ptr(projection));
glBindVertexArray(VAO); // seeing as we only have a single VAO there's no need to bind it every time, but we'll do so to keep things a bit more organized
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); // set the count to 6 since we're drawing 6 vertices now (2 triangles); not 3!
glfwPollEvents();
glfwSwapBuffers(window);
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
glfwTerminate();
return true;
vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec2 aTexCoord;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
out vec2 TexCoord;
void main()
{
gl_Position = projection * view * model *vec4(aPos, 1.0);
TexCoord = vec2(aTexCoord.x, aTexCoord.y);
}
fragment shader:
#version 330 core
out vec4 FragColor;
in vec2 TexCoord;
uniform sampler2D texture1;
void main()
{
FragColor = texture(texture1, TexCoord);
}
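As for saving each unit into a cv::Mat: one possible approach, sketched here under the assumption that the unit has just been rendered to the default framebuffer and that the image is 3-channel BGR (as in the glTexImage2D call above; the helper name is made up), is to read the pixels back after drawing:
#include <opencv2/opencv.hpp>
cv::Mat readUnitFromFramebuffer(int width, int height)
{
    cv::Mat unit(height, width, CV_8UC3);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);  // rows are tightly packed
    glReadPixels(0, 0, width, height, GL_BGR, GL_UNSIGNED_BYTE, unit.data);
    cv::flip(unit, unit, 0);              // OpenGL's origin is bottom-left
    return unit;
}
Calling this once per unit (after updating the texture coordinates and drawing) would give one cv::Mat per block.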

Textures not drawing OpenGL

I am having some strange behavior when trying to draw a texture in OpenGL. Currently all this program does for me is draw the background color with no indication of a texture being drawn. I have just moved from Visual Studio (where this code produces the correct output) to compiling in the command prompt. This code should color the background and draw one texture in the center of the screen.
I am concerned that I may have supplied the incorrect libraries at compile time, since as far as I can tell everything I am doing is the same. Other libraries I tried, however, were always reported as incompatible.
Main code:
#define GLEW_STATIC
#include <GL/glew.h> // window management library
#include <GL/glfw3.h>
#include <GL/glm.hpp>
#include <GL/gtc/matrix_transform.hpp> //
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
int main(int argc, char** argv){
//Initialize GLFW and GLEW...
//Setup and combine shaders...
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
GLint color_att = glGetAttribLocation(program, "color");
glVertexAttribPointer(color_att, 3, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (2 *sizeof(GLfloat)));
glEnableVertexAttribArray(color_att);
GLint tex_att = glGetAttribLocation(program, "uv");
glVertexAttribPointer(tex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (5 *sizeof(GLfloat)));
glEnableVertexAttribArray(tex_att);
glUseProgram(program);
GLuint texture;
glGenTextures(1, &texture);
setthisTexture(texture, "./black.png");
// Create geometry of the square
int size = CreateSquare();
while (!glfwWindowShouldClose(window)){
// Clear background
glClearColor(viewport_background_color_g[0],
viewport_background_color_g[1],
viewport_background_color_g[2], 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//set displacement - 'program' being the shader program
int matrixLocation = glGetUniformLocation(program, "x");
glm::mat4 translate = glm::mat4();
translate = glm::translate(translate, glm::vec3(0.0f, 0.0f, 0.0f));
glUniformMatrix4fv(matrixLocation, 1, GL_FALSE, &translate[0][0]);
glBindTexture(GL_TEXTURE_2D, texture);
glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, 0);
glfwPollEvents();
glfwSwapBuffers(window);
}
}
Vertex Shader:
#version 130
in vec2 vertex;
in vec3 color;
in vec2 uv;
out vec2 uv_interp;
// Uniform (global) buffer
uniform mat4 x;
// Attributes forwarded to the fragment shader
out vec4 color_interp;
void main(){
vec4 t;
t = vec4(vertex, 0.0, 1.0);
gl_Position = x*t;
color_interp = vec4(color, 1.0);
uv_interp = uv;
}
Fragment Shader:
#version 130
in vec4 color_interp;
in vec2 uv_interp;
uniform sampler2D onetex;
void main(){
vec4 color = texture2D(onetex, uv_interp);
gl_FragColor = vec4(color.r,color.g,color.b,color.a);
if(gl_FragColor.a < 0.9){
discard;
}
}
setthisTexture:
void setthisTexture(GLuint w, const char *fname)
{
glBindTexture(GL_TEXTURE_2D, w);
int width, height, nrChannels;
unsigned char* image = stbi_load(fname, &width, &height, &nrChannels, 0);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
stbi_image_free(image);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
CreateSquare:
int CreateSquare(void) {
// The face of the square is defined by four vertices and two triangles
// Number of attributes for vertices and faces
// const int vertex_att = 7; // 7 attributes per vertex: 2D (or 3D) position (2), RGB color (3), 2D texture coordinates (2)
// const int face_att = 3; // Vertex indices (3)
GLfloat vertex[] = {
// square (two triangles)
// Position Color Texcoords
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // Top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, // Bottom-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f // Bottom-left
};
GLuint face[] = {
0, 1, 2, // t1
2, 3, 0 //t2
};
GLuint vbo, ebo;
// Create buffer for vertices
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
// Create buffer for faces (index buffer)
glGenBuffers(1, &ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(face), face, GL_STATIC_DRAW);
// Return number of elements in array buffer
return sizeof(face);
}
For glVertexAttribPointer to work, either a named buffer object has to be bound to GL_ARRAY_BUFFER or a pointer to the vertex data has to be passed.
In your case this means that
int size = CreateSquare();
has to be done before
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
.....
Note in the function CreateSquare, the named buffer object vbo is bound:
glBindBuffer(GL_ARRAY_BUFFER, vbo);
which is used by the glVertexAttribPointer calls.
See OpenGL 4.6 API Compatibility Profile Specification; 10.3.9 Vertex Arrays in Buffer Objects; page 409:
A buffer object binding point is added to the client state associated with each vertex array type and index. The commands that specify the locations and organizations of vertex arrays copy the buffer object name that is bound to ARRAY_BUFFER to the binding point corresponding to the vertex array type or index being specified. For example, the VertexAttribPointer command copies the value of ARRAY_BUFFER_BINDING (the queriable name of the buffer binding corresponding to the target ARRAY_BUFFER) to the client state variable VERTEX_ATTRIB_ARRAY_BUFFER_BINDING for the specified index.
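In outline, the fixed initialization could look like this; a sketch assembled from the snippets above, using the names from the question's code:
// Create the VBO/EBO first so a named buffer is bound to GL_ARRAY_BUFFER...
int size = CreateSquare();
// ...then the attribute pointers capture that binding.
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
GLint color_att = glGetAttribLocation(program, "color");
glVertexAttribPointer(color_att, 3, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (2*sizeof(GLfloat)));
glEnableVertexAttribArray(color_att);
GLint tex_att = glGetAttribLocation(program, "uv");
glVertexAttribPointer(tex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (5*sizeof(GLfloat)));
glEnableVertexAttribArray(tex_att);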

opengl 2d game only drawing second sprite texture and not first

I'm currently creating a 2D fighting game using OpenGL, and I have run into a problem where OpenGL only draws at my last initialized sprite's position, regardless of how many sprites I initialize and try to draw. Even when I initialize a sprite1 and a sprite2 but only draw sprite1, sprite2 is what still gets drawn. It starts with my sprite class, where I set where on the screen I want my image as well as which image to use:
Sprite.cpp
Init(int screenCoordinateX, int screenCoordinateY, uint imageWidth, unsigned int imageHeight, std::string imageFilePath)
{
//casting to float since GLSL shader variables vec2,3,4 require vertex data to be in floats
this->x = static_cast<float>(x);
this->y = static_cast<float>(y);
this->width = static_cast<float>(width);
this->height = static_cast<float>(height);
glGenBuffers(1, &vboID);
Blz::Graphics::GLTexture texture(imageFilePath);
this->texture = texture;
float halfWidth = this->width / 2;
//Setting sprite origin at bottom middle of image by subtracting half width
this->vertexData.at(0).SetPosition(glm::vec3{ this->x + (this->width - halfWidth), this->y + this->height, 0.0f });//Top right corner
this->vertexData.at(1).SetPosition(glm::vec3{ this->x - halfWidth, this->y + height, 0.0f });//Top left corner
this->vertexData.at(2).SetPosition(glm::vec3{ this->x - halfWidth, this->y, 0.0f });//Bottom left corner
this->vertexData.at(3).SetPosition(glm::vec3{ this->x - halfWidth, this->y, 0.0f });//Bottom left corner
this->vertexData.at(4).SetPosition(glm::vec3{ this->x + (this->width - halfWidth), this->y, 0.0f });//Bottom right corner
this->vertexData.at(5).SetPosition(glm::vec3{ this->x + (this->width - halfWidth), this->y + this->height, 0.0f });//Top right corner
this->vertexData.at(0).SetUV(glm::vec2{ 1.0f, 1.0f });
this->vertexData.at(1).SetUV(glm::vec2{ 0.0f, 1.0f });
this->vertexData.at(2).SetUV(glm::vec2{ 0.0f, 0.0f });
this->vertexData.at(3).SetUV(glm::vec2{ 0.0f, 0.0f });
this->vertexData.at(4).SetUV(glm::vec2{ 1.0f, 0.0f });
this->vertexData.at(5).SetUV(glm::vec2{ 1.0f, 1.0f });
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, (sizeof(Vector3D) * this->vertexData.size()), &this->vertexData.front(), GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, position));
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, textureCoordinates));
//Unbind
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
I then try to render the sprites passed in to the renderer like so:
Renderer.cpp
void Renderer::Draw(Sprite& sprite)
{
glm::mat4 orthoProjection = glm::ortho(0.0f, static_cast<sfloat>(1024), 0.0f, static_cast<sfloat>(768));
GLuint transformationMatrixUniformLocation = this->shaderProgram.GetUniformLocation("transformationMatrix");
glUniformMatrix4fv(transformationMatrixUniformLocation, 1, GL_FALSE, &(orthoProjection[0][0]));
glBindTexture(GL_TEXTURE_2D, sprite.texture.id);
glBindBuffer(GL_ARRAY_BUFFER, sprite.vboID);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
Here is my main.cpp where I begin to call everything:
int main()
{
Blz::Graphics::Renderer renderer;
Blz::Window window;
Blz::Input input;
Scene scene;
window.Initialize();
renderer.Init();
Sprite sprite1;
sprite1.Init(900, 300, 200, 200, "CharImage.png");
Sprite sprite2;
sprite2.Init(100, 100, 200, 200, "CharImage.png");
while (!input.IsKeyPressed(SDLK_ESCAPE))
{
window.ClearBuffers();
renderer.Draw(sprite1);
window.SwapBuffers();
}
return 0;
}
So even though I ask for sprite1 to be drawn, only sprite2's position at (100, 100) gets drawn to the screen. Even if I manually enter a vboID of 1 (which is the vboID of sprite1) within Renderer.cpp, it still draws sprite2's position. What am I doing wrong?
Here are my Shaders if necessary:
VertexShader.glsl
#version 430
in vec3 vertexPosition;
in vec2 textCoord;
out vec2 TextureCoord;
uniform mat4 transformationMatrix;
void main()
{
vec4 position = vec4(vertexPosition, 1.0f);
gl_Position = transformationMatrix * position;
TextureCoord = textCoord;
};
FragmentShader.glsl
#version 430
out vec4 daColor;
in vec2 TextureCoord;
uniform sampler2D basicTexture;
void main()
{
vec4 texel = texture(basicTexture, TextureCoord);
daColor = texel;
};
So I guess this is a lesson for anyone aspiring to learn and use OpenGL: anytime you bind a VBO by its id, you also need to specify your glVertexAttribPointer calls again before drawing. So my new Renderer.cpp looks like this:
void Renderer::Draw(Sprite& sprite)
{
glm::mat4 orthoProjection = glm::ortho(0.0f, static_cast<sfloat>(1024), 0.0f, static_cast<sfloat>(768));
GLuint transformationMatrixUniformLocation = this->shaderProgram.GetUniformLocation("transformationMatrix");
glUniformMatrix4fv(transformationMatrixUniformLocation, 1, GL_FALSE, &(orthoProjection[0][0]));
glBindTexture(GL_TEXTURE_2D, sprite.texture.id);
glBindBuffer(GL_ARRAY_BUFFER, sprite.vboID);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, position));
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, textureCoordinates));
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
This other SO answer explains why: is VertexAttribPointer needed after each BindBuffer?. To avoid this, you would use the newer VAOs, which store the vertex attribute state so you don't have to repeat the glVertexAttribPointer calls every time.
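For reference, a VAO-based version might look roughly like this; a sketch only, assuming the same Vector3D layout and a context where VAOs are available (the vaoID member is hypothetical and not part of the original class):
// In Sprite::Init, after filling vertexData:
glGenVertexArrays(1, &vaoID);   // vaoID is an assumed new member, like vboID
glBindVertexArray(vaoID);
glBindBuffer(GL_ARRAY_BUFFER, vboID);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vector3D) * this->vertexData.size(), &this->vertexData.front(), GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, position));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(Vector3D), (void*)offsetof(Vector3D, textureCoordinates));
glBindVertexArray(0);
// In Renderer::Draw, a single bind then restores all of that attribute state:
glBindTexture(GL_TEXTURE_2D, sprite.texture.id);
glBindVertexArray(sprite.vaoID);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);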

Cube is not rendering in OpenGL

I'm trying to write a basic OpenGL 3.3 program with shaders, buffers, etc. that draws a cube. The problem is that the cube is not drawn. Sorry for such an amount of code, but I feel like the error might be anywhere, because to me it all seems right: the display function is looping, the shaders compile, and the matrices are passed to the shaders. I suspect that something might be wrong with culling. Please take a look. Here is the code (I'm using freeglut; first init() is called, then display runs in a loop):
initialization code:
struct ProgramData
{
GLuint theProgram;
GLuint iModel;
GLuint iView;
GLuint iProjection;
};
ProgramData shaderProgram;
ProgramData LoadProgram(const std::string &strVertexShader,
const std::string &strFragmentShader)
{
std::vector<GLuint> shaderList;
shaderList.push_back(LoadShader(GL_VERTEX_SHADER, strVertexShader));
shaderList.push_back(LoadShader(GL_FRAGMENT_SHADER, strFragmentShader));
ProgramData data;
data.theProgram = CreateProgram(shaderList);
data.iModel = glGetUniformLocation(data.theProgram, "mModel");
data.iView = glGetUniformLocation(data.theProgram, "mView");
data.iProjection = glGetUniformLocation(data.theProgram, "mProjection");
return data;
}
float cube_vertices[] = {
-1.0f, -1.0f, 1.0f,
1.0f, -1.0f, 1.0f,
1.0f, 1.0f, 1.0f,
-1.0f, 1.0f, 1.0f,
-1.0f, -1.0f, -1.0f,
1.0f, -1.0f, -1.0f,
1.0f, 1.0f, -1.0f,
-1.0f, 1.0f, -1.0f,
GREEN_COLOR,
BLUE_COLOR,
RED_COLOR,
BROWN_COLOR,
GREEN_COLOR,
BLUE_COLOR,
RED_COLOR,
BROWN_COLOR,
};
GLubyte cube_elements[] = {
0,1,2, 2,3,0,
0,3,4, 4,5,0,
0,5,6, 6,1,0,
1,6,7, 7,2,1,
7,4,3, 3,2,7,
4,7,6, 6,5,4
};
void InitializeProgram()
{
//initialize vertex buffer
glGenBuffers(1, &vertex_buffer_obj);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer_obj);
glBufferData(GL_ARRAY_BUFFER, sizeof(cube_vertices),
cube_vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//initialize index buffer
glGenBuffers(1, &index_buffer_obj);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer_obj);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(cube_elements),
cube_elements, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
shaderProgram = LoadProgram("shader.vert", "shader.frag");
}
void init()
{
InitializeProgram();
int numberOfVertices = 8;
size_t color_data_offset = sizeof(float) * 3 * numberOfVertices;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer_obj);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0,
(void*)color_data_offset);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer_obj);
glBindVertexArray(0);
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
glFrontFace(GL_CW);
glEnable(GL_DEPTH_TEST);
glDepthMask(GL_TRUE);
glDepthFunc(GL_LEQUAL);
glDepthRange(0.0f, 1.0f);
}
vertex shader:
#version 330
layout (location = 0) in vec3 inPosition;
layout (location = 1) in vec3 color;
uniform mat4 mProjection;
uniform mat4 mView;
uniform mat4 mModel;
smooth out vec3 theColor;
void main()
{
gl_Position = mProjection * mView * mModel * vec4(inPosition, 1);
theColor = color;
}
fragment shader:
#version 330
smooth in vec3 theColor;
out vec4 outputColor;
void main()
{
outputColor = vec4(theColor, 1);
}
draw code:
glm::vec3 cam_pos(3, 2, 3);
void display()
{
glClearColor(0.3f, 0.3f, 0.3f, 1.0f);
glClearDepth(1.0f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shaderProgram.theProgram);
glm::mat4 model_matrix = glm::translate(glm::vec3(0, 0, 0));
glm::mat4 view_matrix = glm::lookAt(cam_pos,
glm::vec3(0, 0, 0), glm::vec3(0, 0, 1));
glm::mat4 proj_matrix = glm::perspective(45.0f, 1.0f, 1.0f, 100.0f);
glUniformMatrix4fv(shaderProgram.iProjection, 1,
GL_FALSE, glm::value_ptr(proj_matrix));
glUniformMatrix4fv(shaderProgram.iView, 1,
GL_FALSE, glm::value_ptr(view_matrix));
glUniformMatrix4fv(shaderProgram.iModel, 1,
GL_FALSE, glm::value_ptr(model_matrix));
glBindVertexArray(vao);
int size; glGetBufferParameteriv(GL_ELEMENT_ARRAY_BUFFER,
GL_BUFFER_SIZE, &size);
glDrawElements(GL_TRIANGLES, size / sizeof(GLubyte), GL_UNSIGNED_SHORT, 0);
glBindVertexArray(0);
glUseProgram(0);
glutSwapBuffers();
glutPostRedisplay();
}
UPD: in the init method, the offset for the colors should be computed as sizeof(float) * 3 * numberOfVertices instead of sizeof(GLubyte) * 3 * numberOfVertices, since the colors are stored as floats. The rendering problem is not solved by this.
SOLVED: thank you for the help. See my answer below.
One thing I did spot when glancing over your code is the following line from your vertex shader:
gl_Position = mProjection * mView * mModel * vec4(inPosition, 0);
That 0 should really be a 1.0.
In display() function
glDrawElements(GL_TRIANGLES, size / sizeof(GLubyte), GL_UNSIGNED_SHORT, 0);
should be changed to
glDrawElements(GL_TRIANGLES, size / sizeof(GLubyte), GL_UNSIGNED_BYTE, 0);
and in init()
glFrontFace(GL_CW);
to
glFrontFace(GL_CCW);
So the problem was that I passed incorrect data to OpenGL. The index array is GLubyte (one byte per element), but for some reason I decided it was GLushort (two bytes) and told glDrawElements so.
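A small way to keep the count and the type in sync is to derive both from the index array itself; a minimal sketch using the cube_elements array declared above:
// cube_elements is a GLubyte[], so the element type is GL_UNSIGNED_BYTE
// and the count falls out of the array size.
const GLsizei indexCount = sizeof(cube_elements) / sizeof(cube_elements[0]);
glDrawElements(GL_TRIANGLES, indexCount, GL_UNSIGNED_BYTE, 0);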
edit: it doesn't matter much here, but the up vector (when generating the camera matrix) should be glm::vec3(0, 1, 0), not glm::vec3(0, 0, 1).