Different fragment shader colors - c++

I want to draw different figures with different colors using shaders.
I tried using uniform variables and splitting the meshes' vertex attributes into separate Vertex Buffer Objects, but when I run the program only the second color is used and the first figure isn't shown at all. I'm not sure whether it's an issue with how I bind the shaders or with the way I use the uniform variables. Thanks in advance.
Here is the code:
#include <GL/glew.h>
#include <GL/glut.h>
#include <stdio.h>
const char* vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fragmentShaderSource = "#version 330 core\n"
"in vec4 color;\n"
"out vec4 FragColor;\n"
"uniform vec3 figColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(figColor, 0.0f);\n"
"}\n\0";
// Set of vertices that draw the different figures:
float vertices1[] = {
-0.8f, 0.6f, 0.0f,
-0.8f, 0.4f, 0.0f,
-0.83f, 0.44f, 0.0f,
-0.87f, 0.51f, 0.0f,
-0.9f, 0.57f, 0.0f,
-0.93f, 0.63f, 0.0f,
-0.95f, 0.69f, 0.0f,
-0.97f, 0.75f, 0.0f,
-0.98f, 0.8f, 0.0f,
-0.91f, 0.8f, 0.0f,
-0.85f, 0.79f, 0.0f,
-0.8f, 0.77f, 0.0f,
};
float vertices2[] = {
-0.8f, -0.15f, 0.0f,
-1.0f, 0.0f, 0.0f,
-0.95f, 0.0f, 0.0f,
-0.86f, -0.01f, 0.0f,
-0.81f, -0.02f, 0.0f,
-0.75f, -0.03f, 0.0f,
};
GLuint vertexShader;
GLuint fragmentShader;
GLuint shaderProgram;
void display(void) {
// Background color.
glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
GLint uniform = glGetUniformLocation(shaderProgram, "figColor");
glUniform3f(uniform, 1.0f, 0.0f, 0.0f);
glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
glUniform3f(uniform, 0.0f, 0.749f, 1.0f);
glDrawArrays(GL_TRIANGLE_FAN, 0, 6);
glUseProgram(0);
glFlush();
}
// Main.
int main(int argc, char** argv) {
glutInit(&argc, argv);
// Color mode.
glutInitDisplayMode(GLUT_RGBA);
// Window size.
glutInitWindowSize(500, 500);
// Title.
glutCreateWindow("Tarea 3: Figura con curvas");
GLenum err = glewInit();
if (err != GLEW_OK) {
printf("glewInit failed: %s", glewGetErrorString(err));
exit(1);
}
// Compile vertex shader.
vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
// Check for vertex shader compilation errors.
int success;
char infoLog[512];
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(vertexShader, 512, NULL, infoLog);
printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n");
}
// Compile fragment shader.
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
// Check for fragment shader compile errors.
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(fragmentShader, 512, NULL, infoLog);
printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n");
}
// Link and delete shaders.
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
//Separating the Vertex Buffer Objects.
GLuint VBOs[2];
glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices1), vertices1, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices2), vertices2, GL_STATIC_DRAW);
// Specify a VertexArrayObject for each mesh.
GLuint VAOs[2];
glGenVertexArrays(2, VAOs);
glBindVertexArray(VAOs[0]);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindVertexArray(VAOs[1]);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// Bind the VAO before the draw call.
glBindVertexArray(VAOs[0]);
glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
glBindVertexArray(VAOs[1]);
glDrawArrays(GL_TRIANGLE_FAN, 0, 24);
// Displaying the figures.
glutDisplayFunc(display);
glutMainLoop();
}

You forgot to generate the vertex buffer object names:
//Separating the Vertex Buffer Objects.
GLuint VBOs[2];
glGenBuffers(2, VBOs); <----
You have to bind the corresponding vertex array object before drawing each mesh in the display function (note that the VAOs array moves to file scope so that display() can access it):
GLuint VAOs[2];
void display(void) {
// Background color.
glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
GLint uniform = glGetUniformLocation(shaderProgram, "figColor");
glUniform3f(uniform, 1.0f, 0.0f, 0.0f);
glBindVertexArray(VAOs[0]); // <---
glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
glUniform3f(uniform, 0.0f, 0.749f, 1.0f);
glBindVertexArray(VAOs[1]); // <----
glDrawArrays(GL_TRIANGLE_FAN, 0, 6);
glUseProgram(0);
glFlush();
}
int main(int argc, char** argv) {
// [...]
// Specify a VertexArrayObject for each mesh.
// GLuint VAOs[2]; <---- DELETE
glGenVertexArrays(2, VAOs);
// [...]
// DELETE
/*
// Bind the VAO before the draw call.
glBindVertexArray(VAOs[0]);
glDrawArrays(GL_TRIANGLE_FAN, 0, 12);
glBindVertexArray(VAOs[1]);
glDrawArrays(GL_TRIANGLE_FAN, 0, 24);
*/
// [...]
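With glGenBuffers added and the draw calls removed from main, the setup could then look like this (a sketch using the same variable names as the question; VAOs is the file-scope array from above, error checking omitted):
//Separating the Vertex Buffer Objects.
GLuint VBOs[2];
glGenBuffers(2, VBOs);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices1), vertices1, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices2), vertices2, GL_STATIC_DRAW);
// Specify a VertexArrayObject for each mesh.
glGenVertexArrays(2, VAOs);
glBindVertexArray(VAOs[0]);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[0]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindVertexArray(VAOs[1]);
glBindBuffer(GL_ARRAY_BUFFER, VBOs[1]);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// Displaying the figures.
glutDisplayFunc(display);
glutMainLoop();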

Related

OpenGL fragment shader crashes with EXC_BAD_ACCESS on glDrawArrays

I'm using OpenGL 3.3 on macOS 11.5.2. I have 6 fragment shaders, 3 that work and 3 that crash with EXC_BAD_ACCESS. They all use the same vertex shader. There have been many questions about this problem, but I've made sure to:
Unbind my previous state.
Make sure attribute arrays are enabled and used.
Use only a single thread, i.e. no concurrency.
The shaders that crash are all using one array of offsets and a kernel to do post-processing effects. If I replace texture_coordinate.st + offsets[i] with just texture_coordinate.st, it'll work just fine. As this makes the offsets array unused and optimized out, I have a suspicion that it could be due to a register allocation bug as mentioned in the comments in this question, but can't confirm.
I've also checked for other problems, such as loading them in different orders, using different compiler flags, but the crashes are consistent.
Here's a minimal, reproducible example (dependent on glad and glfw3, compiled with C++17) with one of the problematic shaders.
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <cassert> // for assert() below
const char VERTEX_POST[] = R"(
#version 330 core
layout (location = 0) in vec2 position;
layout (location = 1) in vec2 in_texture_coordinate;
out vec2 texture_coordinate;
void main()
{
gl_Position = vec4(position.x, position.y, 0.0, 1.0);
texture_coordinate = in_texture_coordinate;
}
)";
const char FRAGMENT_POST[] = R"(
#version 330 core
in vec2 texture_coordinate;
out vec4 FragColor;
uniform sampler2D image;
const float offset = 1.0 / 300.0;
void main()
{
vec2 offsets[9] = vec2[](
vec2(-offset, offset), // top-left
vec2( 0.0f, offset), // top-center
vec2( offset, offset), // top-right
vec2(-offset, 0.0f), // center-left
vec2( 0.0f, 0.0f), // center-center
vec2( offset, 0.0f), // center-right
vec2(-offset, -offset), // bottom-left
vec2( 0.0f, -offset), // bottom-center
vec2( offset, -offset) // bottom-right
);
float kernel[9] = float[](
-1, -1, -1,
-1, 9, -1,
-1, -1, -1
);
vec3 sample_texture[9];
for(int i = 0; i < 9; i++)
sample_texture[i] = vec3(texture(image, texture_coordinate + offsets[i]));
vec3 color = vec3(0.0);
for (int i = 0; i < 9; i++)
color += sample_texture[i] * kernel[i];
FragColor = vec4(color, 1.0);
}
)";
float vertices[] = {
// Positions // Texture coords
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
1.0f, 1.0f, 0.0f, 1.0f, 1.0f,
-1.0f, 1.0f, 0.0f, 0.0f, 1.0f,
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f,
1.0f, -1.0f, 0.0f, 1.0f, 0.0f,
1.0f, 1.0f, 0.0f, 1.0f, 1.0f
};
GLuint create_shader(const char* source, GLenum type)
{
GLuint id = glCreateShader(type);
glShaderSource(id, 1, &source, nullptr);
glCompileShader(id);
int success;
glGetShaderiv(id, GL_COMPILE_STATUS, &success);
assert(success);
return id;
}
int main()
{
// ---- INIT GLFW & GLAD ----
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_DEBUG_CONTEXT, GLFW_TRUE);
GLFWwindow* window = glfwCreateWindow(800, 800, "Temp", nullptr, nullptr);
if (window == nullptr) return -1;
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) return -1;
// ---- CREATE QUAD ----
GLuint quad, vbo;
glGenVertexArrays(1, &quad);
glGenBuffers(1, &vbo);
glBindVertexArray(quad);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (void*)0);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (void*)(3 * sizeof(float)));
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// ---- CREATE TEXTURE ----
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
char data[800 * 800 * 4] = {};
for (char& i : data) i = 127;
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 800, 800, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
glGenerateMipmap(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, 0);
// ---- CREATE SHADER PROGRAM ----
GLuint vertex_shader = create_shader(VERTEX_POST, GL_VERTEX_SHADER);
GLuint fragment_shader = create_shader(FRAGMENT_POST, GL_FRAGMENT_SHADER);
GLuint program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
glLinkProgram(program);
glValidateProgram(program);
int success;
glGetProgramiv(program, GL_LINK_STATUS, &success);
assert(success);
// ---- MAIN LOOP ----
while (!glfwWindowShouldClose(window))
{
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(program);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texture);
glUniform1i(glGetUniformLocation(program, "image"), 0);
glBindVertexArray(quad);
glDrawArrays(GL_TRIANGLES, 0, 6);
glfwSwapBuffers(window);
glfwPollEvents();
}
return 0;
}
The example gives me a grey image when not including offsets and otherwise exhibits the crash. What could be causing this?

OpenGL: Program renders plain white square instead of a transformed square

I am trying to render a 2D square that has color and is transformed due to 4x4 matrices. The output is supposed to look like this:
However, I am instead getting this:
This is frustrating because I had this problem in another project. My professor walked me through how to solve it, and all we did was play around with a few things here and there and it just magically worked; no code was altered. Now I am experiencing this issue again and there are no clues as to why it is happening. I typed the tutorial code correctly, character for character, and it is not being rendered correctly. It did, however, render correctly on my professor's computer.
Here is the code:
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <iostream>
#define WINDOW_TITLE "Modern OpenGL"
#ifndef GLSL
#define GLSL(Version, Source) "#version" #Version "\n" #Source
#endif
GLint shaderProgram, windowWidth = 800, windowHeight = 600;
GLuint VBO, VAO, EBO, texture;
void uResizeWindow(int, int);
void uRenderGraphics();
void uCreateShader();
void uCreateBuffers();
//Vertex shader source code
const GLchar* vertexShaderSource = GLSL(330,
layout(location = 0) in vec3 position;
layout(location = 1) in vec3 color;
out vec3 mobileColor;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main() {
gl_Position = projection * view * model * vec4(position, 1.0f); //transforms vertices to clip coordinates
mobileColor = color; //references incoming color data
});
//Fragment shader source code
const GLchar* fragmentShaderSource = GLSL(330,
in vec3 mobileColor;
out vec4 gpuColor;
void main() {
gpuColor = vec4(mobileColor, 1.0f);
});
int main(int argc, char** argv) {
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
glutInitWindowSize(windowWidth, windowHeight);
glutCreateWindow(WINDOW_TITLE);
glutReshapeFunc(uResizeWindow);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK) {
std::cout << "Failed to initialize GLEW" << std::endl;
}
uCreateShader();
uCreateBuffers();
glUseProgram(shaderProgram);
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glutDisplayFunc(uRenderGraphics);
glutMainLoop();
//Destroys buffer objects once used
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteBuffers(1, &EBO);
return 0;
}
void uResizeWindow(int w, int h) {
windowWidth = w;
windowHeight = h;
glViewport(0, 0, windowWidth, windowHeight);
}
void uRenderGraphics() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindVertexArray(VAO);
glm::mat4 model(1.0f);
model = glm::translate(model, glm::vec3(0.0f, 0.0f, 0.0f)); //place object at center of viewport
model = glm::rotate(model, 15.0f, glm::vec3(1.0f, 0.0f, 0.0f)); //rotate object 15 degrees on x-axis
model = glm::scale(model, glm::vec3(2.0f, 2.0f, 2.0f)); //increase object size by factor of 2
//transforms the camera
glm::mat4 view(1.0f);
view = glm::translate(view, glm::vec3(0.0f, 0.0f, -3.0f)); //moves camera backwards -3 units in z
//creates perspective projection
glm::mat4 projection(1.0f);
projection = glm::perspective(45.0f, (GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);
//retrieves and passes transform matrices to shader program
GLint modelLoc = glGetUniformLocation(shaderProgram, "model");
GLint viewLoc = glGetUniformLocation(shaderProgram, "view");
GLint projLoc = glGetUniformLocation(shaderProgram, "projection");
//draws the triangles
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glBindVertexArray(0); //deactive the vertex array object
glutSwapBuffers();
}
void uCreateShader() {
//vertex
GLint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
//fragment
GLint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, vertexShader);
glAttachShader(shaderProgram, fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
}
void uCreateBuffers() {
GLfloat vertices[] = {
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, //top right vertex 0
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, //bottom right vertex 1
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, //bottom left vertex 2
-0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 1.0f //top left vertex 3
};
GLuint indices[] = {
0, 1, 3, //triangle 1
1, 2, 3 //triangle 2
};
//gen buffer ids
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
//set attrib pointer 0 to hold pos data
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
//set attrib pointer 1 to hold color data
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
glBindVertexArray(0); //deactivate VAO
}
The vertex shader and the fragment shader cannot be compiled because a space is missing in the GLSL macro (after #version):
#define GLSL(Version, Source) "#version" #Version "\n" #Source
#define GLSL(Version, Source) "#version " #Version "\n" #Source
I recommend checking whether the shader compilation succeeded and whether the program object linked successfully.
Whether a shader compiled successfully can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS. Whether a program linked successfully can be checked with glGetProgramiv and the parameter GL_LINK_STATUS.
See the answer to OpenGL ignores Quads and makes them Triangles.
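A minimal check, added after glLinkProgram(shaderProgram) in uCreateShader, could look like this (a sketch; the 512-byte log buffer size is arbitrary):
GLint status = GL_FALSE;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
GLchar log[512];
glGetShaderInfoLog(vertexShader, sizeof(log), NULL, log);
std::cout << "vertex shader compile error: " << log << std::endl;
}
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE) {
GLchar log[512];
glGetShaderInfoLog(fragmentShader, sizeof(log), NULL, log);
std::cout << "fragment shader compile error: " << log << std::endl;
}
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &status);
if (status != GL_TRUE) {
GLchar log[512];
glGetProgramInfoLog(shaderProgram, sizeof(log), NULL, log);
std::cout << "program link error: " << log << std::endl;
}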
The angle argument of glm::perspective is specified in radians:
projection = glm::perspective(45.0f, (GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);
projection = glm::perspective(glm::radians(45.0f),
(GLfloat)windowWidth / (GLfloat)windowHeight, 0.1f, 100.0f);
You forgot to set the matrix uniforms:
GLint modelLoc = glGetUniformLocation(shaderProgram, "model");
GLint viewLoc = glGetUniformLocation(shaderProgram, "view");
GLint projLoc = glGetUniformLocation(shaderProgram, "projection");
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));
glUniformMatrix4fv(projLoc, 1, GL_FALSE, glm::value_ptr(projection));

How to translate and rotate a Triangle in OpenGL over time?

I am trying to rotate and translate my single triangle over time. I have already written the main.cpp and I have written separate files for my Vertex and Fragment shader source code.
Here is the code in my main.cpp file:
void framebuffer_size_callback(GLFWwindow* window, int width, int height);
void processInput(GLFWwindow *window);
// Shaders
const char *vertexShaderSource =
"#version 410\n"
"in vec3 vp;\n"
"void main()\n"
"{\n"
"gl_Position = vec4(aPos, 1.0);\n"
"}\0";
const char *fragmentShaderSource =
"#version 410\n"
"out vec4 FragColor;\n"
"in vec3 myColor;\n"
"void main()\n"
"{\n"
"FragColor = vec4(myColor, 1.0f);\n"
"}\n\0";
int main ()
{
// start GL context and O/S window using the GLFW helper library
if (!glfwInit ())
{
fprintf (stderr, "ERROR: could not start GLFW3\n");
return 1;
}
// uncomment these lines if on Apple OS X
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(640, 480, "LearnOpenGL", NULL, NULL);
if (!window)
{
fprintf(stderr, "ERROR: could not open window with GLFW3\n");
glfwTerminate();
return 1;
}
glfwMakeContextCurrent(window);
// start GLEW extension handler
glewExperimental = GL_TRUE;
glewInit();
// get version info
const GLubyte* renderer = glGetString(GL_RENDERER);
const GLubyte* version = glGetString(GL_VERSION);
printf("Renderer: %s\n", renderer);
printf("OpenGL version supported %s\n", version);
glEnable(GL_DEPTH_TEST); // enable depth-testing
glDepthFunc(GL_LESS);
// Draw a single triangle
float points[] = {
// positions // colors
0.0f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f,
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f,
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f
};
GLuint VBO = 0;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
// Generate a VAO.
GLuint VAO = 0;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
// Compile a Vertex Shader
int vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
// Compile a fragment shader.
int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
// Compile shaders into a executable shader program.
int shaderProgram = glCreateProgram();
glAttachShader(shaderProgram, fragmentShader);
glAttachShader(shaderProgram, vertexShader);
glLinkProgram(shaderProgram);
// Create another float array to make my triangle fan.
float points_5_triangles[] = {
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f,
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.5f, 0.0f, 1.0f, 2.0f, 0.5f,
// Another triangle made from point 1, 3, and 4
-0.5f,
}
// Generate another VBO for my Triangle Fan
GLuint VBO_5_triangles = 0;
glGenBuffers(1, &VBO_5_triangles);
glBindBuffer(GL_ARRAY_BUFFER, VBO_5_triangles);
glBufferData(GL_ARRAY_BUFFER, sizeof(points_5_triangles), points_5_triangles, GL_STATIC_DRAW);
// Generate another VAO for my Triangle Fan
GLuint VAO_5_triangles = 0;
glGenVertexArrays(1, &VAO_5_triangles);
glBindVertexArray(VAO_5_triangles);
glBindBuffer(GL_ARRAY_BUFFER, VBO_5_triangles);
glBufferData(GL_ARRAY_BUFFER, sizeof(points_5_triangles), points_5_triangles, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
glEnableVertexAttribArray(0);
// Drawing the triangles aka render loop
while (!glfwWindowShouldClose(window))
{
processInput(window);
// wipe the drawing surface clear
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Draw Triangle
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
// Draw Triangle Fan
glBindVertexArray(VAO_5_triangles);
glDrawArrays(GL_TRIANGLE_FAN, 0, 7);
// swap buffers and poll IO events
glfwPollEvents();
glfwSwapBuffers(window);
}
// close GL context and any other GLFW resources
glfwTerminate();
return 0;
}
The tutorial I am following does go over transformations but in the example it uses, the triangles have textures as well as shaders. For my purposes, I want to do this without textures added to my code.
Can someone walk me through how to add two transformations, translation and rotation, to my "single triangle" as shown in this code?
Whether your triangle has a texture or not has nothing to do with transformation.
You rotate your triangle simply by calculating a transformation matrix, passing it to your vertex shader and multiplying your coordinates with it.
Your vertex shader should look something like this:
const char *vertexShaderSource =
"#version 410\n"
"layout (location = 0) in vec3 vp;\n"
"uniform mat4 transform;"
"void main()\n"
"{\n"
" gl_Position = transform * vec4(vp, 1.0);\n"
"}\0";
I recommend using the glm library for that. You calculate your matrix and pass it to your shader like this:
auto transformMatrix = glm::rotate( /* your rotation calculation */ );
auto transLoc = glGetUniformLocation(shaderProgram, "transform");
glUniformMatrix4fv(transLoc, 1, GL_FALSE, glm::value_ptr(transformMatrix));
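To animate the triangle, you can rebuild the matrix every frame in the render loop, for example from glfwGetTime(). A minimal sketch, assuming the glm headers (glm/glm.hpp, glm/gtc/matrix_transform.hpp, glm/gtc/type_ptr.hpp) and <cmath> are included and the vertex shader above is used:
// inside the render loop, after glUseProgram(shaderProgram)
float t = (float)glfwGetTime();
glm::mat4 transformMatrix(1.0f);
// translate back and forth along x over time
transformMatrix = glm::translate(transformMatrix, glm::vec3(0.5f * std::sin(t), 0.0f, 0.0f));
// rotate around the z axis over time (angle in radians)
transformMatrix = glm::rotate(transformMatrix, t, glm::vec3(0.0f, 0.0f, 1.0f));
GLint transLoc = glGetUniformLocation(shaderProgram, "transform");
glUniformMatrix4fv(transLoc, 1, GL_FALSE, glm::value_ptr(transformMatrix));
// then draw as before
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);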

OpenGL SOIL texture coords not recognized

I'm writing a program that draws a quad with a texture. Here's my code:
#include <stdio.h>
#include <stdlib.h>
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <SOIL.h>
#include <math.h>
extern const char* VertexShaderCode;
extern const char* FragmentShaderCode;
int main()
{
glfwInit();
glewInit();
//initialize glew
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_TRUE);
int width = 1920, height = 1080;
glViewport(0, 0, width, height);
GLFWwindow* window = glfwCreateWindow(width, height, "Opengl program", NULL, NULL);
glfwGetFramebufferSize(window, &width, &height);
if (!window)
{
printf("Someting is wrong with window");
}
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
if (glewInit() != GLEW_OK)
{
printf("Something is wrong with glew");
}
/*GLfloat vertecies[] = {
0.75f, 0.75f, 0.0f, //0
0.75f, -0.75f, 0.0f, //1
-0.75f, -0.75f, 0.0f, //2
-0.75f, 0.75f, 0.0f, //3
0.5f, 0.5f, 0.0f, //0
0.5f, -0.5f, 0.0f, //1
-0.5f, -0.5f, 0.0f, //2
-0.5f, 0.5f, 0.0f, //3
};
GLuint index[] =
{
0,4,1,
4,1,5,
5,1,2,
6,2,5,
2,6,3,
6,7,3,
3,7,4,
3,4,0
};*/
GLfloat vertecies[] = {
//vertices colors
0.5f, 0.5f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, // Top Right
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // Bottom Right
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, // Bottom Left
-0.5f, 0.5f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f // Top Left
};
GLuint index[] =
{
0,1,3,
1,2,3
};
GLfloat texCoords[] = {
0.0f, 0.0f, // Lower-left corner
1.0f, 0.0f, // Lower-right corner
0.5f, 1.0f // Top-center corner
};
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
int widthText, heightText;
unsigned char* image = SOIL_load_image("container.jpg", &widthText, &heightText, 0, SOIL_LOAD_RGB);
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, widthText, heightText, 0, GL_RGB, GL_UNSIGNED_BYTE, image);
glGenerateMipmap(GL_TEXTURE_2D);
SOIL_free_image_data(image);
glBindTexture(GL_TEXTURE_2D, 0);
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
GLuint EBO;
glGenBuffers(1, &EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(index), index, GL_STATIC_DRAW);
GLuint VAO;
glGenVertexArrays(1, &VAO);
GLuint VBO;
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertecies), vertecies, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(index), index, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
// Color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, 8 * sizeof(GLfloat), (GLvoid*)(6 * sizeof(GLfloat)));
glEnableVertexAttribArray(2);
glBindVertexArray(0);
const char* adapter[1];
GLuint vertexShaderID;
vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
adapter[0] = VertexShaderCode;
glShaderSource(vertexShaderID, 1, adapter, 0);
glCompileShader(vertexShaderID);
GLint success;
GLchar infoLog[512];
glGetShaderiv(vertexShaderID, GL_COMPILE_STATUS, &success);
if (!success)
{
glGetShaderInfoLog(vertexShaderID, 512, NULL, infoLog);
printf("ERROR::SHADER::VERTEX::COMPILATION_FAILED %s\n", infoLog);
}
//declare fragment shader
GLuint fragmentShaderID;
//create fragment shader
fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
//set fragment shader source
adapter[0] = FragmentShaderCode;
glShaderSource(fragmentShaderID, 1, adapter, 0);
//compile fragment shader
glCompileShader(fragmentShaderID);
//declare a shader program
GLuint shaderProgram;
shaderProgram = glCreateProgram();
//attach haders to the program
glAttachShader(shaderProgram, vertexShaderID);
glAttachShader(shaderProgram, fragmentShaderID);
//link program
glLinkProgram(shaderProgram);
glUseProgram(shaderProgram);
//rendering
while (!glfwWindowShouldClose(window))
{
glfwPollEvents();
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glBindTexture(GL_TEXTURE_2D, texture);
glUseProgram(shaderProgram);
glBindVertexArray(VAO);
//glEnableClientState(GL_VERTEX_ARRAY);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
//glDisableClientState(GL_VERTEX_ARRAY);
glBindVertexArray(0);
glfwSwapBuffers(window);
}
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glfwTerminate();
return 0;
}
The vertex attrib array is not picking up the texture coordinates as my third input. Can anyone see what I am doing wrong? Oh, and here's my shader code:
const char* VertexShaderCode =
"#version 330 core\r\n"
""
"layout(location=0) in vec3 position;"
"layout(location=1) in vec3 color;"
"layout(location=2) in vec2 textCoord;"
""
"out vec2 TextCoord;"
""
""
"out vec3 theColor;"
""
"void main()"
"{"
" gl_Position = vec4(position, 1.0);"
" theColor = color;"
" TextCoord = textCoord;"
"}";
const char* FragmentShaderCode =
"#version 330 core\r\n"
""
"in vec3 theColor;"
"in vec2 TexCoord;"
""
"uniform sampler2D ourTexture;"
""
""
""
"out vec4 color;"
""
"void main()"
"{"
" color = texture(ourTexture, TexCoord);"
"}";
Help.
The variable is called out vec2 TextCoord in the vertex shader, but in vec2 TexCoord; in the fragment shader (note the additional t in the vertex shader).
You are only checking the compile status of the vertex shader, but not the compile status of the fragment shader or the link status of the program.
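So the quickest fix is to make the names match, e.g. out vec2 TexCoord; and TexCoord = textCoord; in the vertex shader, to match in vec2 TexCoord; in the fragment shader. For the second point, the same pattern already used for the vertex shader can be repeated after compiling the fragment shader and linking the program (a sketch reusing the variables from the question):
glGetShaderiv(fragmentShaderID, GL_COMPILE_STATUS, &success);
if (!success)
{
glGetShaderInfoLog(fragmentShaderID, 512, NULL, infoLog);
printf("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED %s\n", infoLog);
}
glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
if (!success)
{
glGetProgramInfoLog(shaderProgram, 512, NULL, infoLog);
printf("ERROR::PROGRAM::LINKING_FAILED %s\n", infoLog);
}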

OpenGL ES2 coloured triangle

I am trying to make a basic triangle in OpenGL ES 2, but whatever I do, it just doesn't seem to be working. All I get is a white triangle. However, I think there might be a problem
when linking the program (that's why I have disabled the return from the link-status checking "if", otherwise it won't run at all). Also, when I run it from the console, I get "error: 1281" in the output. My code:
int loadShader(int type, char * shaderCode)
{
int length = strlen(shaderCode);
int shader = glCreateShader(type);
glShaderSource(shader, 1, (const char **) &shaderCode, NULL);
glCompileShader(shader);
return shader;
}
// Initializes the application data
int Init(void)
{
// Vertex shaders
char * vertexShaderCode =
"attribute vec4 vPosition; \n"
"void main() { \n"
" gl_Position = vPosition; \n"
"} \n";
char * fragmentShaderCode =
"precision mediump float; \n"
"void main() { \n"
" gl_FragColor = vec4(0.5, 0.5, 0.5, 1.0);\n"
"} \n";
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexShaderCode);
GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentShaderCode);
GLint linked;
program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glBindAttribLocation(program, 0, "vPosition");
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if(!linked)
{
GLint infoLen = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char *) malloc(sizeof(char) * infoLen);
glGetProgramInfoLog(program, infoLen, NULL, infoLog);
free(infoLog);
}
glDeleteProgram(program);
//return 0;
}
// Basic GL setup
glClearColor (0.0, 0.0, 0.0, 1.0);
return GL_TRUE;
}
// Main-loop workhorse function for displaying the object
void Display(void)
{
// Clear the screen
glClear (GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glClearColor(0.0, 0.0, 0.0, 1.0);
GLfloat vertices[] = {
-0.5f, 0.0f, -1.0f,
0.0f, 1.0f, -1.0f,
0.5f, 0.0f, -1.0f,
};
GLfloat colors[] = {
0.0f, 0.0f, 1.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
};
glUseProgram(program);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, colors);
glEnableVertexAttribArray(1);
glDrawArrays(GL_TRIANGLES, 0, 3);
}
Enable the vertex attrib array first and then pass the pointers:
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vertices);
glDrawArrays(GL_TRIANGLES, 0, 3);
Also, you are sending colors in attrib array 1, but there is no such attribute in the shader.
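If you want the per-vertex colors to be used, the shaders need a matching attribute and varying, and the attribute has to be bound to location 1 before linking, mirroring vPosition. A minimal sketch (the names vColor and fColor are placeholders):
char * vertexShaderCode =
"attribute vec4 vPosition; \n"
"attribute vec4 vColor; \n"
"varying vec4 fColor; \n"
"void main() { \n"
" gl_Position = vPosition; \n"
" fColor = vColor; \n"
"} \n";
char * fragmentShaderCode =
"precision mediump float; \n"
"varying vec4 fColor; \n"
"void main() { \n"
" gl_FragColor = fColor; \n"
"} \n";
// before glLinkProgram(program):
glBindAttribLocation(program, 1, "vColor");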