I'm trying to create a 2D game using OpenGL. My helper libraries are GLFW, GLEW, SOIL (for images) and glm (for maths).
I've managed to draw a rectangle with a very simple texture on it. But when I try to move this rectangle, it frequently shows a little flicker (the interval between flickers is almost always the same), and the faster I move it, the more visible it becomes.
Another problem is that I'm working on a laptop: it renders fine with my integrated graphics (fine as in it works, but it still stutters), but when I run my program on my Nvidia graphics card it just shows my clear color and nothing else, which is extremely odd. My sprite translation happens in the runCallback (called in the main loop) and is just a multiplication of matrices. The resulting matrix is then fetched and used in the draw code. In the drawCallback there's just the DrawSprite function being called. Also, I should note I'm using OpenGL 3.3.
I'm sorry in advance for the rather large amount of code, but after extensive testing and trying a multitude of things I have no idea where my mistake lies.
If you would like to help but need any more information, I will provide it.
UPDATE:
The problem with the Nvidia graphics card is resolved; it was caused by a wrong uniform parameter. But the stutter remains.
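For context, my runCallback looks roughly like this (a simplified sketch, not the exact code; spritePosition, speed and SetTransform are placeholder names):
// Simplified sketch of my runCallback: the sprite's transform is rebuilt each
// frame as a plain translation matrix (just a matrix multiplication in glm).
void runCallback(float delta)
{
    spritePosition += glm::vec2(speed * delta, 0.0f);   // move the sprite
    sprite->SetTransform(glm::translate(glm::mat4(1.0f),
                                        glm::vec3(spritePosition, 0.0f)));
}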
IMAGE LOADING CODE
SD_Texture::SD_Texture(const char * fileName)
{
texture = new GLuint;
unsigned char* data = SOIL_load_image(fileName, &width, &height, 0, SOIL_LOAD_RGB);
glGenTextures(1, (GLuint*)texture);
glBindTexture(GL_TEXTURE_2D, *((GLuint*)(texture)));
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
SOIL_free_image_data(data);
}
void SD_Texture::Bind()
{
glBindTexture(GL_TEXTURE_2D, *(GLuint*)texture);
}
VAO SETUP CODE
void SD_Window::SetupVAO()
{
GLfloat vertices[] =
{
-0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f,
0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
-0.5f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.0f, 1.0f, 0.0f,
0.5f, 0.5f, 0.0f, 1.0f, 1.0f
};
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (GLvoid*)nullptr);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glBindVertexArray(0);
}
DRAW CODE
void SD_Window::DrawSprite(SD_Sprite * sprite)
{
glActiveTexture(GL_TEXTURE0);
sprite->GetTexture()->Bind();
glUniformMatrix4fv(transformUniform, 1, GL_FALSE, glm::value_ptr(ortho * sprite->GetTransform()));
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D, 0);
}
MAIN LOOP CODE
void SD_Window::TakeControl(void (*runCallback)(float delta), void (*drawCallback)())
{
double currentTime;
double oldTime = 0.0;
while (!ShouldClose())
{
currentTime = glfwGetTime();
glClear(GL_COLOR_BUFFER_BIT);
drawCallback();
glfwSwapBuffers(window);
runCallback(currentTime - oldTime);
oldTime = currentTime;
glfwPollEvents();
}
glfwDestroyWindow(window);
glfwTerminate();
}
VERTEX SHADER
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 texCoord;
out vec2 sCoord;
uniform mat4 transform;
void main()
{
gl_Position = transform * vec4(position, 1.0f);
sCoord = vec2(texCoord.x, 1.0f - texCoord.y);
}
FRAGMENT SHADER
#version 330 core
in vec2 sCoord;
out vec4 color;
uniform sampler2D sTexture;
void main()
{
color = texture(sTexture, sCoord);
}
Hello, I've got a weird problem with my code. I've been looking on Stack Overflow for some time and found no answer; there were a few similar problems, but they didn't solve anything.
int createTexture(const char* path) {
unsigned int texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
int width, height, nrChannels;
unsigned char* data = stbi_load(path, &width, &height, &nrChannels, 0);
int colorMode = nrChannels == 3 ? GL_RGB : GL_RGBA;
if (data) {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, colorMode, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
} else {
std::cout << "Failed to load texture" << std::endl;
}
stbi_image_free(data);
return texture;
}
Here I have a function that loads a texture; I choose GL_RGB or GL_RGBA depending on the number of channels. I correctly get 3 or 4 channels depending on whether I use the jpg or the png image. Anyway, I get a white background (it was black before I edited it in GIMP).
// set the texture wrapping/filtering options (on currently bound texture)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// load and create a texture
// -------------------------
unsigned int texture1 = createTexture("brick.jpg");
unsigned int texture2 = createTexture("grafiti.png");
// Mode
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
// Shader Class
Shader currentShader("vertexShader.glsl", "fragmentShader.glsl");
currentShader.use();
currentShader.setInt("texture1", 0);
currentShader.setInt("texture2", 1);
while (!glfwWindowShouldClose(window)) {
processInput(window);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture1);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, texture2);
currentShader.use();
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES, sizeof(indices) / sizeof(int), GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
Then I use these textures together with these shaders
Fragment
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
uniform sampler2D texture1;
uniform sampler2D texture2;
void main()
{
FragColor = mix(texture(texture1, TexCoord), texture(texture2, TexCoord), 1);
}
Vertex
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;
layout (location = 2) in vec2 aTexCoord;
out vec3 ourColor;
out vec2 TexCoord;
void main()
{
gl_Position = vec4(aPos, 1.0);
ourColor = aColor;
TexCoord = aTexCoord;
}
float vertices[] = {
// positions // colors // texture coords
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // top right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // bottom right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // bottom left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // top left
};
unsigned int indices[] = {
0, 1, 3,
1, 2, 3,
};
unsigned int VBO;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
unsigned int VAO;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
unsigned int EBO;
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(float), (void*)(6 * sizeof(float)));
glEnableVertexAttribArray(2);
I'm sorry if my English is hideous, but it's the middle of the night and I've been struggling with this for hours now.
I found a solution. Enabling blending was not satisfying, so I wrote the fragment shader in a different way:
#version 330 core
out vec4 FragColor;
in vec3 ourColor;
in vec2 TexCoord;
uniform sampler2D texture1;
uniform sampler2D texture2;
void main()
{
vec4 AlphaColor = texture(texture2, TexCoord);
if(AlphaColor.a < 0.1) {
FragColor = texture(texture1, TexCoord);
} else {
FragColor = mix(texture(texture1, TexCoord), AlphaColor, 0.9);
}
}
Now the fragment shader takes the alpha channel into account and only mixes the pixels that are not (nearly) transparent.
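For reference, this is roughly the blending setup I found unsatisfying (a sketch; standard alpha blending only combines the fragment output with what is already in the framebuffer, so the two textures would have to be drawn in separate passes):
// Standard alpha blending: incoming fragments are weighted by their alpha
// value against the current framebuffer contents.
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);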
I am learning OpenGL through the tutorials at learnopengl.com.
I've already drawn my hand-defined 3x3 texture onto a square using 2 triangles without a shader. Using shaders, however, I don't understand this weird behavior:
The fragment shader seems to ignore texture coordinates but strangely paints my two triangles with the color of the first pixel. I tried putting garbage values into the texture coordinates and it always stays the same.
unsigned char texture_buffer[9*4] = // 3x3 texture RGBA ...
{
255,40,100,255, 100,200,200,255, 150,150,200,255,
100,100,150,255, 200,200,100,255, 150,200,150,255,
150,100,100,255, 200,100,200,255, 200,150,150,255
};
float positions_texcoords[] =
{ // x y z tex coords
0.0f, 0.0f, 0.0f, 0.0f, 0.0f,
0.5f, 0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.5f, 0.0f, 0.0f, 1.0f,
0.5f, 0.5f, 0.0f, 1.0f, 1.0f,
0.5f, 0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.5f, 0.0f, 0.0f, 1.0f
};
one time draw call:
GLuint texture = 0;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 3, 3, 0,
GL_RGBA, GL_UNSIGNED_BYTE, (void *)texture_buffer );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(positions_texcoords),
positions_texcoords, GL_STATIC_DRAW);
// position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// texture coord attribute
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
glClearColor(0.3f, 0.0f, 0.1f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, texture);
glUseProgram(shader_program_id);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 6);
SwapBuffers(window_dc);
vertex shader:
#version 330 core
layout (location = 0) in vec3 in_pos;
layout (location = 1) in vec2 in_tex_coord;
out vec2 out_tex_coord;
void main()
{
gl_Position = vec4(in_pos, 1.0);
out_tex_coord = in_tex_coord;
}
fragment shader:
#version 330 core
out vec4 FragColor;
in vec2 in_tex_coord;
uniform sampler2D in_texture;
void main()
{
FragColor = texture(in_texture, in_tex_coord);
}
I'd appreciate any suggestions about what might be going on. Thank you.
The output of a shader stage is linked to the input of the next shader stage by its name (except when you use a layout qualifier). See interface matching rules between shader stages.
The name of the fragment shader input variable has to be the same as the name of the vertex shader output variable.
Since the name of the texture coordinate output in the vertex shader is out_tex_coord
out vec2 out_tex_coord;
the name of the corresponding input in the fragment shader has to be out_tex_coord, too:
in vec2 out_tex_coord;
void main()
{
FragColor = texture(in_texture, out_tex_coord);
}
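For illustration, a minimal matched pair (only the interface names matter here):
// vertex shader
#version 330 core
layout (location = 0) in vec3 in_pos;
layout (location = 1) in vec2 in_tex_coord;
out vec2 out_tex_coord;        // the output name chosen here ...
void main()
{
    gl_Position = vec4(in_pos, 1.0);
    out_tex_coord = in_tex_coord;
}
// fragment shader
#version 330 core
in vec2 out_tex_coord;         // ... has to be repeated verbatim as the input name
out vec4 FragColor;
uniform sampler2D in_texture;
void main()
{
    FragColor = texture(in_texture, out_tex_coord);
}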
I am having some strange behavior when trying to draw a texture in OpenGL. Currently all this program does for me is draw the background color with no indication of a texture being drawn. I have just moved from Visual Studio (where this code produces the correct output) to compiling in the command prompt. This code should color the background and draw one texture in the center of the screen.
I am concerned that I may have supplied the incorrect libraries for compilation, since, as far as I can tell, everything else I am doing is the same. Other libraries I tried, however, were always reported as incompatible.
Main code:
#define GLEW_STATIC
#include <GL/glew.h>   // OpenGL extension loading
#include <GL/glfw3.h>  // window management
#include <GL/glm.hpp>
#include <GL/gtc/matrix_transform.hpp>
#define STB_IMAGE_IMPLEMENTATION
#include "stb_image.h"
int main(int argc, char** argv){
//Initialize GLFW and GLEW...
//Setup and combine shaders...
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
GLint color_att = glGetAttribLocation(program, "color");
glVertexAttribPointer(color_att, 3, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (2 *sizeof(GLfloat)));
glEnableVertexAttribArray(color_att);
GLint tex_att = glGetAttribLocation(program, "uv");
glVertexAttribPointer(tex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *) (5 *sizeof(GLfloat)));
glEnableVertexAttribArray(tex_att);
glUseProgram(program);
GLuint texture;
glGenTextures(1, &texture);
setthisTexture(texture, "./black.png");
// Create geometry of the square
int size = CreateSquare();
while (!glfwWindowShouldClose(window)){
// Clear background
glClearColor(viewport_background_color_g[0],
viewport_background_color_g[1],
viewport_background_color_g[2], 0.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//set displacement - 'program' being the shader program
int matrixLocation = glGetUniformLocation(program, "x");
glm::mat4 translate = glm::mat4();
translate = glm::translate(translate, glm::vec3(0.0f, 0.0f, 0.0f));
glUniformMatrix4fv(matrixLocation, 1, GL_FALSE, &translate[0][0]);
glBindTexture(GL_TEXTURE_2D, texture);
glDrawElements(GL_TRIANGLES, size, GL_UNSIGNED_INT, 0);
glfwPollEvents();
glfwSwapBuffers(window);
}
}
Vertex Shader:
#version 130
in vec2 vertex;
in vec3 color;
in vec2 uv;
out vec2 uv_interp;
// Uniform (global) buffer
uniform mat4 x;
// Attributes forwarded to the fragment shader
out vec4 color_interp;
void main(){
vec4 t;
t = vec4(vertex, 0.0, 1.0);
gl_Position = x*t;
color_interp = vec4(color, 1.0);
uv_interp = uv;
}
Fragment Shader:
#version 130
in vec4 color_interp;
in vec2 uv_interp;
uniform sampler2D onetex;
void main(){
vec4 color = texture2D(onetex, uv_interp);
gl_FragColor = vec4(color.r,color.g,color.b,color.a);
if(gl_FragColor.a < 0.9){
discard;
}
}
setthisTexture:
void setthisTexture(GLuint w, const char *fname)
{
glBindTexture(GL_TEXTURE_2D, w);
int width, height, nrChannels;
unsigned char* image = stbi_load(fname, &width, &height, &nrChannels, 0);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, image);
stbi_image_free(image);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
CreateSquare:
int CreateSquare(void) {
// The face of the square is defined by four vertices and two triangles
// Number of attributes for vertices and faces
// const int vertex_att = 7; // 7 attributes per vertex: 2D (or 3D) position (2), RGB color (3), 2D texture coordinates (2)
// const int face_att = 3; // Vertex indices (3)
GLfloat vertex[] = {
// square (two triangles)
// Position Color Texcoords
-0.5f, 0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // Top-left
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // Top-right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, // Bottom-right
-0.5f, -0.5f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f // Bottom-left
};
GLuint face[] = {
0, 1, 2, // t1
2, 3, 0 //t2
};
GLuint vbo, ebo;
// Create buffer for vertices
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex), vertex, GL_STATIC_DRAW);
// Create buffer for faces (index buffer)
glGenBuffers(1, &ebo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(face), face, GL_STATIC_DRAW);
// Return number of elements in array buffer
return sizeof(face);
}
For glVertexAttribPointer to be used, either a named buffer object has to be bound to GL_ARRAY_BUFFER, or a pointer to the vertex data has to be passed.
In your case this means that
int size = CreateSquare();
has to be done before
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
.....
Note that in the function CreateSquare the named buffer object vbo is bound:
glBindBuffer(GL_ARRAY_BUFFER, vbo);
which is used by the glVertexAttribPointer calls.
See OpenGL 4.6 API Compatibility Profile Specification; 10.3.9 Vertex Arrays in Buffer Objects; page 409:
A buffer object binding point is added to the client state associated with each vertex array type and index. The commands that specify the locations and organizations of vertex arrays copy the buffer object name that is bound to ARRAY_BUFFER to the binding point corresponding to the vertex array type or index being specified. For example, the VertexAttribPointer command copies the value of ARRAY_BUFFER_BINDING (the queriable name of the buffer binding corresponding to the target ARRAY_BUFFER) to the client state variable VERTEX_ATTRIB_ARRAY_BUFFER_BINDING for the specified index.
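A minimal sketch of the corrected ordering, using the names from the question:
// Create and fill the vertex/element buffers first, so that a named buffer
// object is bound to GL_ARRAY_BUFFER ...
int size = CreateSquare();
// ... and only then describe how the attributes are laid out in that buffer.
GLint vertex_att = glGetAttribLocation(program, "vertex");
glVertexAttribPointer(vertex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), 0);
glEnableVertexAttribArray(vertex_att);
GLint color_att = glGetAttribLocation(program, "color");
glVertexAttribPointer(color_att, 3, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *)(2*sizeof(GLfloat)));
glEnableVertexAttribArray(color_att);
GLint tex_att = glGetAttribLocation(program, "uv");
glVertexAttribPointer(tex_att, 2, GL_FLOAT, GL_FALSE, 7*sizeof(GLfloat), (void *)(5*sizeof(GLfloat)));
glEnableVertexAttribArray(tex_att);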
I can't seem to get this OpenGL program to render a quad after I add a VAO.
Assuming the program is correctly initialized without errors, the following is the code that fills the VAO.
float quad[] =
{
//verts colors
32.0f, 0.0f, 1.0f, 0.0f, 0.0f, // Top-left
32.0f, 32.0f, 0.0f, 1.0f, 0.0f, // Top-right
0.0f, 32.0f, 0.0f, 0.0f, 1.0f, // Bottom-right
0.0f, 0.0f, 1.0f, 1.0f, 1.0f // Bottom-left
};
float textureCoords[] =
{
0.0f, 0.0f, //x
0.5f, 0.0f, //w
0.5f, 0.5f, //y
0.0f, 0.5f //h
};
float elements[] =
{
0,1,2,
2,3,0
};
//loadShaders compiles and links both shaders
GLuint shaders = loadShaders("vertexShader.c", "fragmentShader.c");
GLuint VAO, VBO[2], EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(2, VBO);
glGenBuffers(1, &EBO);
//bind VAO
glBindVertexArray(VAO);
glUseProgram(shaders);
//quad and color data
glBindBuffer(GL_ARRAY_BUFFER, VBO[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(quad), quad, GL_STATIC_DRAW);
GLint quadAttrib = glGetAttribLocation(shaders, "quad");
glEnableVertexAttribArray(quadAttrib);//target 'quad' in shader
glVertexAttribPointer(quadAttrib, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), 0);
//color data
GLint colorAttrib = glGetAttribLocation(shaders, "color");
glEnableVertexAttribArray(colorAttrib);//target 'color' in shader
glVertexAttribPointer(colorAttrib, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (void*)(2 * sizeof(GLfloat)));
//UV data
glBindBuffer(GL_ARRAY_BUFFER, VBO[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(textureCoords), textureCoords, GL_STATIC_DRAW);
GLint uvAttrib = glGetAttribLocation(shaders, "uvCoords");//target 'uvCoords' in shaders
glEnableVertexAttribArray(uvAttrib);
glVertexAttribPointer(uvAttrib, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
//TEXTURES
//laod and use textures
GLuint textures[2];
glGenTextures(2, textures);
int texWidth, texHeigth;
unsigned char *image;
//activate texture 0
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textures[0]);
image = SOIL_load_image("atlas.png", &texWidth, &texHeigth, 0, SOIL_LOAD_RGB);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, texWidth, texHeigth, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
SOIL_free_image_data(image);
glUniform1i(glGetUniformLocation(shaders, "img1"), 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//element buffer data
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(elements), elements, GL_STATIC_DRAW);
//UNIFORMS
//projection matrix
GLint projectionUniform = glGetUniformLocation(shaders, "projection");
glm::mat4 orthoProjection = glm::ortho( 0.0f, static_cast<float>(480), 0.0f, static_cast<float>(272));
glUniformMatrix4fv(projectionUniform, 1, GL_FALSE, glm::value_ptr(orthoProjection));
//model view projection
GLint modelViewUniform = glGetUniformLocation(shaders, "modelView");
//unbind VAO and current shader, the VAO remembers the bound shader
glBindVertexArray(0);
glUseProgram(0);
I'm assuming that the VAO has now kept track of the following:
The quad buffer 'VBO[0]' and its corresponding attribPointer 'quadAttrib' to "quad" in the shader
The color buffer 'same VBO[0]' and its corresponding attribPointer 'colorAttrib' to 'color' in shader
The UV buffer 'VBO[1]' and its corresponding attribPointer 'uvAttrib' to 'uvCoords' in shader
That the current texture unit (0) corresponds to the loaded texture and the bind to "img1" in the fragment shader as well as its parameters
The EBO that is bound to GL_ELEMENT_ARRAY_BUFFER
The projection matrix and its uniform
The handle to the model matrix uniform
The shader program that was in use while the VAO was bound? Not sure; I still explicitly use the only shader program anyway
Later in the main loop, if I attempt the following, nothing gets drawn:
// Clear the screen to black
glClearColor(0.2f, 0.0f, 0.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
//draw the quad within this VAO
glBindVertexArray(VAO);
glUseProgram(shaders);
glm::mat4 model;
model = glm::translate(glm::mat4(1.0f), glm::vec3(newTransX, newTransY, newTransZ));
model = glm::scale(model, glm::vec3(newScale));
model = glm::rotate(
model,
(GLfloat)clock() / (GLfloat)CLOCKS_PER_SEC * 10000.0f,
glm::vec3(0.0f, 0.0f, 1.0f)
);
// upload the uniform matrix, modelViewUniform should be already linked to the shader uniform through the VAO
glUniformMatrix4fv(modelViewUniform, 1, GL_FALSE, glm::value_ptr(model));
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glBindVertexArray(0);
SDL_GL_SwapWindow(window);
Vertex shader:
#version 330
in vec2 quad;
in vec3 color;
in vec2 uvCoords;
out vec3 Color;
out vec2 UVCoords;
uniform mat4 modelView;
uniform mat4 projection;
void main()
{
Color = color;
UVCoords = uvCoords;
gl_Position = projection * modelView * vec4(quad, 0.0f, 1.0f);
}
Fragment shader:
#version 330
in vec3 Color;//not in use, simple pipeline test
in vec2 UVCoords;
out vec4 outColor;
uniform sampler2D img1;
void main()
{
vec4 finalTexture = texture(img1, UVCoords);
outColor = finalTexture;
}
What am I doing wrong? I know for a fact that the values in the modelView and projection matrices are correct, since the program works if I never use a VAO.
Am I assuming that the VAO remembers more than it actually does? If not, what am I doing wrong?
This can't be right:
float elements[] =
{
0,1,2,
2,3,0
};
You can't use floats for vertex indices. Based on the type you pass to glDrawElements(), this should be:
GLuint elements[] =
{
0,1,2,
2,3,0
};
As for the state tracked in a VAO: no, it does not hold on to all that state. It only tracks the vertex attribute setup state (plus the element array buffer binding). From your list:
The quad buffer 'VBO[0]' and its corresponding attribPointer 'quadAttrib' to "quad" in the shader
Yes. More precisely, the VBO/pointer are associated with the location quadAttrib, and that association is tracked in the VAO. You queried quadAttrib from your shader, but the VAO state does not contain any direct association with the shader.
The color buffer 'same VBO[0]' and its corresponding attribPointer 'colorAttrib' to 'color' in shader
Same here.
The UV buffer 'VBO[1]' and its corresponding attribPointer 'uvAttrib' to 'uvCoords' in shader
And here.
That the current texture unit (0) corresponds to the loaded texture and the bind to "img1" in the fragment shader as well as its parameters
No. That has nothing to do with the vertex attribute state.
The EBO that is bound to GL_ELEMENT_ARRAY_BUFFER
Yes.
The projection matrix and its uniform
No. The uniform value is part of the program state.
The handle to the model matrix uniform
No.
The shader program that was in use while the VAO was bound?
No.
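Putting that together, a minimal sketch of the per-frame draw (using the names from the question); the VAO restores the attribute pointers and the element buffer binding, but the program, the texture binding and any changing uniforms still have to be handled explicitly:
glBindVertexArray(VAO);                      // restores attribute setup + GL_ELEMENT_ARRAY_BUFFER binding
glUseProgram(shaders);                       // the program binding is not part of the VAO
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textures[0]);   // texture bindings are not part of the VAO either
// uniform values live in the program object; only the ones that change need updating
glUniformMatrix4fv(modelViewUniform, 1, GL_FALSE, glm::value_ptr(model));
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);  // with GLuint indices in the EBO
glBindVertexArray(0);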
Here's my code
vertex shader
#version 430
in vec3 position;
uniform mat4 MVP;
void main()
{
gl_Position = MVP * vec4(position, 1.0);
}
fragment shader
#version 430
out vec4 outputColor;
uniform sampler2D tex;
void main()
{
outputColor = texture(tex, gl_PointCoord);
}
init
void init()
{
glPointSize(20.0f);
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
bmp.Open("C:\\users\\alon\\desktop\\star.bmp");
bmp.ReadPixels();
unsigned char *pixels = new unsigned char[bmp.NeededBufferSize()];
bmp.AssignPixels(pixels);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, bmp.GetWidth(), bmp.GetHeight(), 0, GL_RGB, GL_UNSIGNED_BYTE, pixels);
delete[] pixels;
glClearColor(0.2f, 0.2f, 0.2f, 1.0f);
glEnable(GL_DEPTH_TEST);
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
GLuint VaoId;
glGenVertexArrays(1, &VaoId);
glBindVertexArray(VaoId);
position_index = glGetAttribLocation(program, "position");
MVP_location = glGetUniformLocation(program, "MVP");
// x y z s t
GLfloat vertices[] = {-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.5f, 0.5f, 0.0f,
0.5f, 0.5f, 0.0f,
-0.5f, 0.5f, 0.0f,
-0.5f, -0.5f, 0.0f};
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(position_index, 3, GL_FLOAT, GL_FALSE, 0, 0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
render
void render()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnableVertexAttribArray(position_index);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
float c;
if(bTime)
c = (float)std::clock() * 2.0f / CLOCKS_PER_SEC;
else
c = 0.0f;
glm::mat4 modelview = glm::rotate(-c * 50.0f, glm::vec3(0.0f, 1.0f, 0.0f));
modelview = glm::translate(glm::vec3(0.0f + fTranslateX, 0.0f, -1.75f + fTranslateZ)) * modelview;
glm::mat4 projection = glm::perspective(60.0f, 16.0f/9.0f, 0.10f, 100.0f); //perspective
glm::mat4 MVP = projection * modelview;
glUniformMatrix4fv(MVP_location, 1, GL_FALSE, &MVP[0][0]);
glDrawArrays(GL_POINTS, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glDisableVertexAttribArray(position_index);
glutSwapBuffers();
if(glGetError() != GL_NO_ERROR)
exit(1);
}
The star image is:
Output:
Is there anything I'm doing wrong?
In compatibility profiles (or any version of GL before 3.1), the behavior you want only occurs when GL_POINT_SPRITE is enabled. In a core profile, that state was removed, and gl_PointCoord always behaves as if GL_POINT_SPRITE were enabled.
To fix your problem, you must call glEnable(GL_POINT_SPRITE).
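A minimal sketch of that change in the init() shown above (compatibility context only; in a core profile the enum no longer exists and the enable is unnecessary):
// Required in a compatibility context (or GL < 3.1) for gl_PointCoord to be generated:
glEnable(GL_POINT_SPRITE);
glPointSize(20.0f);   // as already done in init()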