OpenGL fragment shader has no effect, how to determine the error? - c++

I'm running Mac OS X Lion and I'm trying to write a basic OpenGl program but my fragment shader isn't working. When I don't include it, I get my black triangle but when I do the screen is just white. I also get no errors loading it. What is the best way to debug this? Here are my shaders:
Vertex:
#version 120
attribute vec2 coord2d;
void main(void) {
gl_Position = vec4(coord2d, 0.0, 1.0);
}
Fragment:
#version 120
void main(void) {
gl_FragColor[0] = 1.0;
gl_FragColor[1] = 1.0;
gl_FragColor[2] = 0.0;
}
And the code to load my shaders I gained from this tutorial.
Edited to add more information
int init_resources()
{
GLfloat triangle_vertices[] = {
0.0f, 0.8f,
-0.8f, -0.8f,
0.8f, -0.8f,
};
glGenBuffers(1, &vbo_triangle);
glBindBuffer(GL_ARRAY_BUFFER, vbo_triangle);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle_vertices), triangle_vertices, GL_STATIC_DRAW);
GLint link_ok = GL_FALSE;
GLuint vs, fs;
if ((vs = create_shader("vertShader.sh", GL_VERTEX_SHADER)) == 0) return 0;
if ((fs = create_shader("fragShader.sh", GL_FRAGMENT_SHADER)) == 0) return 0;
program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
glGetProgramiv(program, GL_LINK_STATUS, &link_ok);
if (!link_ok) {
fprintf(stderr, "glLinkProgram:");
print_log(program);
return 0;
}
const char* attribute_name = "coord2d";
attribute_coord2d = glGetAttribLocation(program, attribute_name);
if (attribute_coord2d == -1) {
fprintf(stderr, "Could not bind attribute %s\n", attribute_name);
return 0;
}
return 1;
}
void onDisplay()
{
glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT| GL_DEPTH_BUFFER_BIT);
glUseProgram(program);
glEnableVertexAttribArray(attribute_coord2d);
glBindBuffer(GL_ARRAY_BUFFER, vbo_triangle);
glVertexAttribPointer(
attribute_coord2d,
2,
GL_FLOAT,
GL_FALSE,
0,
0
);
glDrawArrays(GL_TRIANGLES, 0, 3);
glDisableVertexAttribArray(attribute_coord2d);
glutSwapBuffers();
}
GLuint create_shader(const char* filename, GLenum type)
{
const GLchar* source = file_read(filename);
if (source == NULL) {
fprintf(stderr, "Error opening %s: ", filename); perror("");
return 0;
}
GLuint res = glCreateShader(type);
glShaderSource(res, 1, source, NULL);
free((void*)source);
glCompileShader(res);
GLint compile_ok = GL_FALSE;
glGetShaderiv(res, GL_COMPILE_STATUS, &compile_ok);
if (compile_ok == GL_FALSE) {
fprintf(stderr, "%s:", filename);
print_log(res);
glDeleteShader(res);
return 0;
}
return res;
}

Had to add gl_FragColor[3] = 1.0; as I wasn't setting opacity.

Meanwhile you found your solution, it seems, but I wanted to give you another tip, which helped me (also quite new to all this) a lot recently, particularly if you are working on Mac OS X:
I suggest you install the "Graphics Tools for XCode" (available from the developer downloads section), which will help you a lot to develop your shaders, in particular:
OpenGL Shader Builder which helps you develop and test shaders
OpenGL Profiler which not only helps you to profile your application, but you can also see what resources are loaded etc and in your case: You can set a breakpoint to stop execution (for example before swapping) and then view the shader that is loaded, and modify it at runtime, to see what effect the change has
Here is the User Guide for OpenGL Profiler:
https://developer.apple.com/library/mac/#documentation/GraphicsImaging/Conceptual/OpenGLProfilerUserGuide/Introduction/Introduction.html#//apple_ref/doc/uid/TP40006475-CH1-DontLinkElementID_31
Hope that helps

Related

glAttribLocation returning -1 even though vertex shader is actively using that

Searched Stack Overflow for similar questions in search of my solution , but it doesn't seem to be solved.
main.cpp :
#include"reader.h"
#include"window.h"
#include"shader.h"
int main() {
float vertices[] = {
-0.5f, -0.5f, 0.0f ,
0.5f, -0.5f, 0.0f ,
0.0f, 0.5f, 0.0f
};
Window window;
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
Shader shader;
shader.addShader("./src/shaders/basic.vtx",GL_VERTEX_SHADER);
shader.addShader("./src/shaders/basic.frg", GL_FRAGMENT_SHADER);
shader.compile();
shader.enable();
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices,GL_STATIC_DRAW);
GLint pos_in = glGetAttribLocation(shader.getProgram(), "pos_in");
if (pos_in < 0) {
std::cout << "pos_in not found\n";
}
glVertexAttribPointer(pos_in, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void *)0);
glEnableVertexAttribArray(pos_in);
while (!window.closed()) {
window.update();
glDrawArrays(GL_TRIANGLES,0,3);
}
return 0;
}
shader.h :
#pragma once
#include<glad/glad.h>
#include<iostream>
#include<vector>
#include"reader.h"
class Shader {
std::vector<GLuint*> shaders;
GLuint program;
public :
GLuint& getProgram() {
return program;
}
Shader() {
program = glCreateProgram();
}
void addShader(const char * path, GLenum type) {
std::string data = ShaderReader(path).read_shader();
const char * chardata = data.c_str();
GLuint shader = glCreateShader(type);
glShaderSource(shader, 1, &chardata , nullptr);
glCompileShader(shader);
int success;
char buffer[512];
glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(shader, 512, NULL, buffer);
std::cout << buffer << std::endl;
return;
}
std::cout << "shader inserted into vector\n";
shaders.push_back(&shader);
}
void compile(){
for (int i = 0; i != shaders.size();i++) {
glAttachShader(program, *shaders[i]);
}
glLinkProgram(program);
glValidateProgram(program);
glUseProgram(program);
int status;
glGetProgramiv(program, GL_COMPILE_STATUS, &status);
char buffer[512];
if (!status) {
glGetProgramInfoLog(program,512,NULL,buffer);
std::cout << buffer << std::endl;
return;
}
std::cout << "shader compilation successful\n";
}
void enable() {
glUseProgram(program);
}
void disable() {
glUseProgram(0);
}
~Shader() {
for (int i = 0; i != shaders.size();i++) {
glDeleteShader(*shaders[i]);
}
}
};
vertex shader written in basic.vtx:
#version 400
layout (location = 0 ) in vec3 pos_in ;
void main(){
gl_Position = vec4(pos_in.x , pos_in.y , pos_in.z , 1.0f);
}
fragment shader written in basic.frg :
#version 400
out vec4 color;
void main(){
color = vec4(0.0f, 0.5f , 0.5f , 1.0f);
}
At the time of calling glGetAttribLocation , the vertex shader IS USING pos_in attrib to set gl_Position , yet it returns -1 .
Also the triangle is not rendered when calling glGetAttribLocation() ; or rendered white with direct attrib pointer values like 0 , 1 with an openGL 1281 error.
One thing that is definitely not valid is the shaders.push_back(&shader):
void addShader(const char * path, GLenum type) {
// ....
GLuint shader = glCreateShader(type);
// ....
shaders.push_back(&shader);
}
With the shaders.push_back(&shader) you push back the address of a local variable to the shaders vector. So the glAttachShader(program, *shaders[i]); will result in undefined behavior.
shader holds only an numeric id so there is no need to get a pointer to that, just change the std::vector<GLuint*> shaders to std::vector<GLuint> shaders, use shaders.push_back(shader) and replace all *shaders[i] with shaders[i]
The reason why you don't get an linking error is most likely because the content at the address you get from &shader is not overwritten before you do the glAttachShader(program, *shaders[i]), and that both entries in the shaders vector hold same address of the stack. The result of is that glAttachShader is called each time on the same id, so you bind only the fragement shader to the program.

OpenGL drawing Triangles only white

I try learing OpenGL with those two tutorials:
https://learnopengl.com/#!Getting-started/Hello-Triangle and
https://www.youtube.com/playlist?list=PLEETnX-uPtBXT9T-hD0Bj31DSnwio-ywh
When I draw a simple triangle it is only white. But the code seems right.
This is the fragment Shader:
#version 330 core
out vec4 fragColor;
void main()
{
fragColor = vec4(0.5, 0.3, 0.1, 1.0);
}
This is the ShaderProgram:
ShaderProgram::ShaderProgram(std::string fileName)
{
vertexShader = glCreateShader(GL_VERTEX_SHADER);
if (vertexShader == 0)
std::cerr << "VertexShader creation failed! " << std::endl;
const char* vertexShaderSource = (SourceLoader(fileName + ".vs")).c_str();
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
CheckErrorMessages(vertexShader, GL_COMPILE_STATUS, false, "VertexShader Compilation failed! ");
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
if (fragmentShader == 0)
std::cerr << "FragmentShader Creation failed! " << std::endl;
const char* fragmentShaderSource = (SourceLoader(fileName + ".fs")).c_str();
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
CheckErrorMessages(fragmentShader, GL_COMPILE_STATUS, false, "FragmentShader Compilation failed! ");
program = glCreateProgram();
glAttachShader(program, vertexShader);
glAttachShader(program, fragmentShader);
glLinkProgram(program);
CheckErrorMessages(program, GL_LINK_STATUS, true, "Program linking failed! ");
glValidateProgram(program);
CheckErrorMessages(program, GL_VALIDATE_STATUS, true, "Program validation failed! ");
}
I have three methods in the ShaderProgram class:
1. a method to load the shader code, which is definitly working.
2. a method to Check for Error Messages, which is also working.
3. and a bind() Funktion which is just using glUseProgram(program)
I also have a class for the window, which is created by SDL
Display::Display(std::string title, unsigned int width, unsigned int height)
{
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BUFFER_SIZE, 32);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
m_window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, width, height, SDL_WINDOW_OPENGL);
m_glcontext = SDL_GL_CreateContext(m_window);
GLenum status = glewInit();
if (status != GLEW_OK)
std::cerr << "GLEW failed to initialize!" << std::endl;
isClosed = false;
}
the Display class has a method to update and clear:
void Display::Update()
{
SDL_GL_SwapWindow(m_window);
SDL_Event e;
while (SDL_PollEvent(&e))
{
if (e.type == SDL_QUIT)
isClosed = true;
}
}
void Display::Clear(float red, float green, float blue, float alpha)
{
glClearColor(red, green, blue, alpha);
glClear(GL_COLOR_BUFFER_BIT);
}
I also have a class called Mesh to manage VAO and VBO etc.:
Mesh::Mesh(Vertex* vertices, unsigned int numVertices)
{
drawCount = numVertices;
glGenVertexArrays(1, &VAO);
glBindVertexArray(VAO);
GLuint VBO;
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, numVertices * sizeof(vertices[0]), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(vertices[0]), (void*)0);
glEnableVertexAttribArray(0);
}
Mesh has one Funktion to draw the given vertices:
void Mesh::Draw()
{
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, drawCount);
glBindVertexArray(0);
}
The main Funktion consists out of a while loop:
int main(int argc, char* argv[])
{
SDL_Init(SDL_INIT_EVERYTHING);
Display display("Fenster", 1024, 840);
ShaderProgram shader("./res/Shader");
Vertex vertices[] = { Vertex(glm::vec3(1, 1, 0)), Vertex(glm::vec3(1, -1, 0)), Vertex(glm::vec3(-1, -1, 0)) };
Mesh mesh(vertices, 3);
while (!display.getIsClosed())
{
display.Clear(1.0f, 0.0f, 1.0f, 1.0f);
shader.Bind();
mesh.Draw();
display.Update();
}
SDL_Quit();
return 0;
}
The problem is I don´t get any Error, but the triangle keep staying white.
Thank you for helping!
Edit: Vertex Shader is here:
#version 330 core
layout (location = 0) in vec3 position;
void main()
{
gl_Position = vec4(position.x, position.y, position.z, 1.0);
}
If you check your infoLog you'd find that your shaders aren't compiling. You're assigning the shader source pointer to a temporary string that gets destroyed at the end of the line, and you're sending gibberish to the shader compiler:
const char* fragmentShaderSource = (SourceLoader(fileName + ".fs")).c_str();// After this line the pointer isn't valid
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
SourceLoader returns a temporary string, and you assign fragmentShaderSource pointer to the string. Then there is junk data there by the time you pass it to glShaderSource. You can do:
std::string vertShaderString = SourceLoader(fileName + ".vs");
const char* vertShaderSource = vertShaderString.c_str();
glShaderSource(vertexShader, 1, &vertShaderSource, NULL);
You also need to fix the same problem for the fragment shader.
Also you drew a clockwise winding triangle, which is fine, but by default OpenGL considers counterclockwise winding as front-facing.
Another thing, the reason you didn't catch the error is because you CheckErrorMessages doesn't do anything, you should be writing the infoLog to the string or something.

OpenGL -- glGenVertexArrays, "thread 1: exc_bad_access (code =1, address=0x0)"

I am writing a openGL program(C++) which draws a ground with two 3D objects above it. The programming tool I use is Xcode version 8.0(8A218a)(OSX 10.11.6).
my code(main.cpp):
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>
#include <fstream>
using namespace std;
using glm::vec3;
using glm::mat4;
GLint programID;
//initialize all OpenGL objects
GLuint groundVAO, groundVBO, groundEBO; //ground
bool checkStatus( //OK
GLuint objectID,
PFNGLGETSHADERIVPROC objectPropertyGetterFunc,
PFNGLGETSHADERINFOLOGPROC getInfoLogFunc,
GLenum statusType)
{
GLint status;
objectPropertyGetterFunc(objectID, statusType, &status);
if (status != GL_TRUE)
{
GLint infoLogLength;
objectPropertyGetterFunc(objectID, GL_INFO_LOG_LENGTH, &infoLogLength);
GLchar* buffer = new GLchar[infoLogLength];
GLsizei bufferSize;
getInfoLogFunc(objectID, infoLogLength, &bufferSize, buffer);
cout << buffer << endl;
delete[] buffer;
return false;
}
return true;
}
bool checkShaderStatus(GLuint shaderID) //OK
{
return checkStatus(shaderID, glGetShaderiv, glGetShaderInfoLog, GL_COMPILE_STATUS);
}
bool checkProgramStatus(GLuint programID) //OK
{
return checkStatus(programID, glGetProgramiv, glGetProgramInfoLog, GL_LINK_STATUS);
}
string readShaderCode(const char* fileName) //OK
{
ifstream meInput(fileName);
if (!meInput.good())
{
cout << "File failed to load..." << fileName;
exit(1);
}
return std::string(
std::istreambuf_iterator<char>(meInput),
std::istreambuf_iterator<char>()
);
}
void installShaders() //OK
{
GLuint vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
const GLchar* adapter[1];
//adapter[0] = vertexShaderCode;
string temp = readShaderCode("VertexShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(vertexShaderID, 1, adapter, 0);
//adapter[0] = fragmentShaderCode;
temp = readShaderCode("FragmentShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(fragmentShaderID, 1, adapter, 0);
glCompileShader(vertexShaderID);
glCompileShader(fragmentShaderID);
if (!checkShaderStatus(vertexShaderID) ||
!checkShaderStatus(fragmentShaderID))
return;
programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
if (!checkProgramStatus(programID))
return;
glDeleteShader(vertexShaderID);
glDeleteShader(fragmentShaderID);
glUseProgram(programID);
}
void keyboard(unsigned char key, int x, int y)
{
//TODO:
}
void sendDataToOpenGL()
{
//TODO:
//create solid objects here and bind to VAO & VBO
//Ground, vertices info
const GLfloat Ground[]
{
-5.0f, +0.0f, -5.0f, //0
+0.498f, +0.898, +0.0f, //grass color
+5.0f, +0.0f, -5.0f, //1
+0.498f, +0.898, +0.0f,
+5.0f, +0.0f, +5.0f, //2
+0.498f, +0.898, +0.0f,
-5.0f, +0.0f, +5.0f
};
GLushort groundIndex[] = {1,2,3, 1,0,3};
//Pass ground to vertexShader
//VAO
glGenVertexArrays(1, &groundVAO);
glBindVertexArray(groundVAO);
//VBO
glGenBuffers(1, &groundVBO);
glBindBuffer(GL_ARRAY_BUFFER, groundVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Ground), Ground, GL_STATIC_DRAW);
//EBO
glGenBuffers(1, &groundEBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, groundEBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(groundIndex), groundIndex, GL_STATIC_DRAW);
//connectToVertexShader
glEnableVertexAttribArray(0); //position
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, 0);
glEnableVertexAttribArray(1); //color
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, (char*)(sizeof(float)*3));
}
void paintGL(void)
{
//TODO:
//render your objects and control the transformation here
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//translate model
glm::mat4 modelTransformMatrix = glm::translate(glm::mat4(), vec3(+0.0f, +0.0f, -3.0f));
//perspective view
glm::mat4 projectionMatrix = glm::perspective(+40.0f, +1.0f, +1.0f, +60.0f);
//ultimate matrix
glm::mat4 ultimateMatrix;
//register location on the graphics cards
GLint ultimateMatrixUniformLocation = glGetUniformLocation(programID, "ultimateMatrix");
/*GLint modelTransformMatrixUniformLocation = glGetUniformLocation(programID, "modelTransformMatrix");
GLint projectionMatrixUniformLocation = glGetUniformLocation(programID, "projectionMatrix");*/
//drawing the ground
/*glUniformMatrix4fv(modelTransformMatrixUniformLocation, 1, GL_FALSE, &modelTransformMatrix[0][0]);
glUniformMatrix4fv(projectionMatrixUniformLocation, 1, GL_FALSE, &projectionMatrix[0][0]);*/
glBindVertexArray(groundVAO);
ultimateMatrix = projectionMatrix * modelTransformMatrix;
glUniformMatrix4fv(ultimateMatrixUniformLocation, 1, GL_FALSE, &ultimateMatrix[0][0]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
glFlush();
glutPostRedisplay();
}
void initializedGL(void) //run only once
{
glewInit();
glEnable(GL_DEPTH_TEST);
sendDataToOpenGL();
installShaders();
}
int main(int argc, char *argv[])
{
/*Initialization*/
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutCreateWindow("Try");
glutInitWindowSize(700, 700);
//const GLubyte* glversion = glGetString(GL_VERSION);
/*Register different CALLBACK function for GLUT to response
with different events, e.g. window sizing, mouse click or
keyboard stroke */
initializedGL();
//glewExperimental = GL_TRUE;
glutDisplayFunc(paintGL);
glutKeyboardFunc(keyboard);
/*Enter the GLUT event processing loop which never returns.
it will call different registered CALLBACK according
to different events. */
//printf("OpenGL ver: %s\n", glversion);
glutMainLoop();
return 0;
}
VertexShaderCode.glsl:
#version 430 // GLSL version your computer supports
in layout(location=0) vec3 position;
in layout(location=1) vec3 vertexColor;
uniform mat4 ultimateMatrix;
out vec3 theColor;
void main()
{
vec4 v = vec4(position, 1.0);
gl_Position = ultimateMatrix * v;
theColor = vertexColor;
}
FragmentShaderCode.glsl:
#version 430 //GLSL version your computer supports
out vec4 theColor2;
in vec3 theColor;
void main()
{
theColor2 = vec4(theColor, 1.0);
}
Functions: checkStatus, checkShaderStatus, checkProgramStatus, readShaderCode, installShaders should be all fine.
void keyboard() can be ignored since I havent implemented it yet(just for keyboard control).
I implemented the object "Ground" in sendDataToOpenGL(). But when I compiled and ran the program, "thread 1: exc_bad_access (code =1, address=0x0)" occured in the line of VAO:
And the pop-out window is just a white screen instead of a green grass(3d).
I have tried a method which was provided in other stackoverflow post: using glewExperimental = GL_TRUE;. I didnt see any errors by using that, but the popout screen vanished immediately just after it appeared. It seems that it couldnt help the problem.
Can someone give me a help? Thank you!
glGenVertexArrays is available in since OpenGL version 3.0. If vertex array objects are supported can be checked by glewGetExtension("GL_ARB_vertex_array_object").
Glew can enable additional extensions by glewExperimental = GL_TRUE;. See the GLEW documantation which says:
GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.
Add this to your code:
glewExperimental = GL_TRUE;
glewInit();

Vertex shader not compiling due to a non-Ascii character?

So I started using OpenGL with glew and GLFW to create a game engine, and I almost immediately ran into a problem when starting working with shaders:
They are not being used or have no effect whatsoever if they are being used.
I have been checking my code with plenty of other examples, and they all match up, nothing looks out of place, and I am starting to run out of ideas and patience (I have been trying to figure out why for nearly a month now) with this.
My main core code is here:
#include "headers/Default.hpp"
//Window width and height variables
int windowWidth = 800;
int windowHeight = 600;
float Aspect = (float)windowWidth / (float)windowHeight;
//Buffer width and buffer height
int bufferWidth;
int bufferHeight;
double deltaTime;
double currentTime;
double newTime;
void CalculateDelta()
{
newTime = glfwGetTime();
deltaTime = newTime - currentTime;
currentTime = newTime;
}
//A call back function to get the window size
void UpdateWindowSize(GLFWwindow* window, int width, int height)
{
windowWidth = width;
windowHeight = height;
Aspect = (float)windowWidth / (float)windowHeight;
}
void UpdateFrameBufferSize(GLFWwindow* window, int width, int height)
{
bufferWidth = width;
bufferHeight = height;
}
//Starts on startup and creates an window context and starts the rendering loop
int main()
{
//Creates an engine startup log to keep
CreateStartupLog();
if (!glewInit())
{
WriteStartupLog("ERROR: GLEW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLEW initiated!\n");
}
//If glfw is not initiated for whatever reason we return an error
if (!glfwInit())
{
WriteStartupLog("ERROR: GLFW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLFW initiated!\n");
}
////////////////////////////////////////////////////////////////
// Window Section //
////////////////////////////////////////////////////////////////
//glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
//glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
//Gets the primary monitor of the PC and tells OpenGL to use that monitor
GLFWmonitor* monitor = glfwGetPrimaryMonitor();
const GLFWvidmode* videoMode = glfwGetVideoMode(monitor);
//Creates a GLFW window context that we can work with
GLFWwindow* gameWindow = glfwCreateWindow(windowWidth/*videoMode->width*/, windowHeight/*videoMode->height*/, "FireTech Engine", NULL/*monitor*/, NULL);
//If the game window is not able to be created, prints an error and terminates the program
if (!gameWindow)
{
WriteStartupLog("ERROR: GLFW could not create a window\n");
glfwTerminate();
return 1;
}
else
{
WriteStartupLog("INFO: GLFW created a window!\n\n");
}
//Makes the current context
glfwMakeContextCurrent(gameWindow);
//Sets the window callback function for size
glfwSetWindowSizeCallback(gameWindow, UpdateWindowSize);
glfwSetFramebufferSizeCallback(gameWindow, UpdateFrameBufferSize);
//Initiate GLEW
glewExperimental = GL_TRUE;
glewInit();
////////////////////////////////////////////////////////////////
// Functions to set up various systems of the game engine //
////////////////////////////////////////////////////////////////
//Calls function to create a log file for the game engine
CreateEngineLog();
//Calls the function to compile the default shaders
CompileDefaultShader();
//Calls the function to get and print out hardware and OpenGL version
//PrintHardwareInfo();
////////////////////////////////////////////////////////////////
// Game Code //
////////////////////////////////////////////////////////////////
Sprite testSprite;
//Rendering loop
while (!glfwWindowShouldClose(gameWindow))
{
CalculateDelta();
glClearColor(0.3, 0.6, 1.0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//Viewport and ortho settings
glViewport(0, 0, windowWidth, windowHeight);
glOrtho(-1, 1, -1 / Aspect, 1 / Aspect, 0, 1);
//Draw a sprite
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F2))
{
testSprite.DebugDraw();
}
else
{
testSprite.Draw();
}
//Draws the stuff we just rendered
glfwSwapBuffers(gameWindow);
glLoadIdentity();
//Polls different events, like input for example
glfwPollEvents();
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F1))
{
int fps = GetFPS();
printf("FPS: ");
printf("%d\n", fps);
printf("Frequency: ");
printf("%f\n", 1/double(fps));
}
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_ESCAPE))
{
glfwSetWindowShouldClose(gameWindow, 1);
}
}
glfwTerminate();
WriteEngineLog("PROGRAM EXITED: Window closed");
return 0;
}
Here is the shader.cpp code:
#include "../headers/Default.hpp"
string ReadShaderFile(char* path)
{
ifstream shaderFile;
shaderFile.open(path, std::ifstream::in);
string output;
if (shaderFile.is_open())
{
printf("Opened shader file located at: \"%s\"\n", path);
while (!shaderFile.eof())
{
output += shaderFile.get();
}
printf("Successfully read shader file located at: \"%s\"\n", path);
}
else
{
WriteEngineLog("ERROR: Could not read shader file!\n");
}
shaderFile.close();
return output;
}
Shader::Shader()
{
WriteEngineLog("WARNING: There was no path to any GLSL Shader files\n");
}
Shader::Shader(char* VertexShaderPathIn, char* FragmentShaderPathIn)
{
string vertexShaderString = ReadShaderFile(VertexShaderPathIn);
string fragmentShaderString = ReadShaderFile(FragmentShaderPathIn);
//Prints out the string to show the shader's code
printf("\n%s\n", vertexShaderString.c_str());
printf("\n%s\n", fragmentShaderString.c_str());
//Creates the GLchars needed to input the shader code
const GLchar* vertex_shader = vertexShaderString.c_str();
const GLchar* fragment_shader = fragmentShaderString.c_str();
//Creates a vertex shader and compiles it
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
WriteEngineLog("Blank vertex shader created\n");
glShaderSource(vertexShader, 1, &vertex_shader, NULL);
WriteEngineLog("Vertex shader given source\n");
glCompileShader(vertexShader);
//Compilation error checking begions here
GLint isVertexCompiled = 0;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isVertexCompiled);
if (isVertexCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);
//Writes to the engine log with the shader error
WriteEngineLog("ERROR: Vertex shader failed to compile!\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Vertex shader successfully compiled!\n");
//Creates a fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
WriteEngineLog("Blank fragment shader created\n");
glShaderSource(fragmentShader, 1, &fragment_shader, NULL);
WriteEngineLog("Fragment shader given source\n");
glCompileShader(fragmentShader);
//Compilation error checking begions here
GLint isFragmentCompiled = 0;
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isFragmentCompiled);
if (isFragmentCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);
WriteEngineLog("ERROR: Fragment shader failed to compile\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Fragment shader successfully compiled!\n");
//Creates the final shader product
this->Program = glCreateProgram();
WriteEngineLog("Blank shader created\n");
glAttachShader(this->Program, vertexShader);
WriteEngineLog("Attatched Vertex shader to the shader\n");
glAttachShader(this->Program, fragmentShader);
WriteEngineLog("Attatched Fragment shader to the shader\n");
glLinkProgram(this->Program);
/*GLint isLinked = 0;
glGetProgramiv(this->Program, GL_LINK_STATUS, (int*)&isLinked);
if (isLinked == GL_FALSE)
{
//Gets the lngth of the shader info log
GLint maxLength = 0;
glGetProgramInfolog(ShaderOut, GL_INFO_LOG_LENGTH, &maxLength);
//Gets and puts the actual log into a GLchar
std::vector<GLchar> infoLog(maxLength);
glGetProgramInfoLog(ShaderOut, maxLength, &maxLength, &infoLog[0]);
//Deletes programs and shaders so they don't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog((string)infoLog);
return;
}*/
WriteEngineLog("Shader linked!\n\n");
WriteEngineLog("INFO: Shader created!\n");
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
}
void Shader::Use()
{
glUseProgram(this->Program);
}
Here is the quad.cpp code:
#include "../headers/Default.hpp"
Quad::Quad()
{
position.x = 0;
position.y = 0;
scale.x = 1;
scale.y = 1;
VertexArray = CreateVertexArray();
}
//Quad constructor with one arg
Quad::Quad(Vector2 Position)
{
position = Position;
VertexArray = CreateVertexArray();
}
//Quad constructor with two args
Quad::Quad(Vector2 Position, Vector2 Scale)
{
position = Position;
scale = Scale;
VertexArray = CreateVertexArray();
}
GLuint Quad::CreateVertexArray()
{
GLfloat Vertices[] =
{
//VERTICES //COLORS //TEXCOORDS
0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 0.0f, //1.0f, 1.0f, //Top Right Vertice
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, //1.0f, 0.0f, //Top Left Vertice
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f//, 0.0f, 0.0f //Bottom Left Vertice
};
GLuint vbo, vao;
glGenVertexArrays(1, &vao);
glGenBuffers(1, &vbo);
glBindVertexArray(vao);
//Copy vertices into the buffer
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
//Attribute Pointers
//Position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
//Color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
//Unbinds the VAO
glBindVertexArray(0);
return vao;
}
//Quad debug drawing function
void Quad::DebugDraw()
{
//Use the default shader
DefaultShader.Use();
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBindVertexArray(VertexArray);
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); //CAUSING A CRASH AT THE MOMENT
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
//Unbinds the VAO
glBindVertexArray(0);
}
Here is the sprite.cpp code:
#include "../headers/Default.hpp"
Sprite::Sprite()
{
position.x = 0;
position.y = 0;
}
Sprite::Sprite(Texture tex)
{
defaultTexture = tex;
currentTexture = tex;
}
Sprite::Sprite(Texture tex, Vector2 pos)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
}
Sprite::Sprite(Texture tex, Vector2 pos, Vector2 Scale)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
scale = Scale;
}
void Sprite::Draw()
{
//Binds the default shader again
glBindVertexArray(VertexArray);
//Use the default shader
DefaultShader.Use();
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
}
Here is my vertex shader and fragment shader code (In order):
//Vertex Shader
#version 330 core
layout (location = 0) in vec3 position; // The position variable has attribute position 0
layout (location = 1) in vec3 color;
out vec3 ourColor;
void main()
{
gl_Position = vec4(position, 1.0f); // See how we directly give a vec3 to vec4's constructor
ourColor = color;
}
//Fragment shader
#version 330 core
in vec3 ourColor;
out vec4 color;
void main()
{
color = ourColor;
}
And I'm getting a warning that my shader did not compile... error is that there is a non ascii character at line ZERO of the vertex shader.
I had exactly the same error. This is almost certainly due to Unicode Byte Order Marks, or similar unprinted characters generated by text editors.
These are common in the first characters of a unicode file, but can occur anywhere.
You can programmatically strip these from your shader source strings before compiling, but this could be costly if you are compiling many shaders. See the above link for the data to strip if you go this route.
An alternative is simply to keep the files in ANSI/ASCII format. I am sure most text editors have the facility to set/convert formats, but I will give Notepad++ as an example since it's what I use to edit GLSL:
Open the GLSL file.
Encoding -> Convert to ANSI. (Note that merely hitting "Encode in ANSI" will not strip the characters)
Save the file.
The above should also strip other characters prone to confusing GLSL parsers (and C/C++ in general).
You could inform the user(/developer) the files are in an incorrect format on load in debug builds.

SFML - Opengl VAO issue giving me an (1282) error

I am getting an error when trying to use VAO's inside of SFML and not sure if it is SFML or it is my own opengl code
GLenum err = glewInit();
if (err != GLEW_OK)
{
std::cout << "NOT WORKING" << std::endl;
}
std::vector<sf::Vector3f> g_vertext_buffer_data;
g_vertex_buffer_data.push_back({ -1.0f, -1.0f, 0.0f });
g_vertex_buffer_data.push_back({1.0f, -1.0f, 0.0f});
g_vertex_buffer_data.push_back({ 0.0f, 1.0f, 0.0f });
const char* vertexShaderSource =
"#version 330\n\
in vec4 position;\
void main(void){\ gl_Position = position;\
}";
// compile fragment shader source
const GLchar* fragmentShaderSource =
"#version 330\n\
void main(void) {\
out vec4 fragcolor; fragcolor= vec4(1.0,1.0,1.0,1.0);\
}";
/* Creating Shader */
this->programId = glCreateProgram();
this->vId = glCreateShader(GL_VERTEX_SHADER);
this->fId = glCreateShader(GL_FRAGMENT_SHADER);
/* Get Shader Size */
int vertexShaderLength = strlen(vertexShaderSource);
int fragmentShaderLength = strlen(fragmentShaderSource);
/* Loading and binding shader */
glShaderSource(this->vId, 1, &vertexShaderSource, NULL);
glShaderSource(this->fId, 1, &fragmentShaderSource, NULL);
/* Compile Shaders */
glCompileShader(vId);
glCompileShader(fId);
/* Attach Shaders */
glAttachShader(this->programId, this->vId);
glAttachShader(this->programId, this->fId);
/* Linkg program */
glLinkProgram(this->programId);
/* Use and bind attribute */
glUseProgram(this->programId);
this->positionId = glGetAttribLocation(this->programId, "position");
glUseProgram(0);
/* VAO Time */
glGenVertexArrays(1, &this->vaoId);
glBindVertexArray(this->vaoId);
/* VBO Time assigning to VAO */
glGenBuffers(1, &this->vboId);
glBindBuffer(GL_ARRAY_BUFFER, this->vboId);
glBufferData(GL_ARRAY_BUFFER, g_vertex_buffer_data.size() * sizeof(sf::Vector3f), &g_vertex_buffer_data[0], GL_STATIC_DRAW);
glEnableVertexAttribArray(this->positionId);
glVertexAttribPointer(this->positionId, 2, GL_FLOAT, GL_FALSE, sizeof(sf::Vector3f), 0);
/* Close out bindings */
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
while(1)
{
glUseProgram(this->programId);
glBindVertexArray(this->vaoId);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
glUseProgram(0);
gameWindow.glPushStates();
}
The error code I get is: opengl error in user code (1282)​
I have changed the size() issue that was brought up in the blBufferData() but still am getting the issue.
There is at least a problem with the size that is passed to glBufferData:
glBufferData(GL_ARRAY_BUFFER,
sizeof(g_vertex_buffer_data) * sizeof(sf::Vector3f),
g_vertex_buffer_data[0], GL_STATIC_DRAW);
sizeof(g_vertex_buffer_data) is equal to sizeof(std::vector<?>) which is the size of the vector object and not the size of the data contained. Try using
glBufferData(GL_ARRAY_BUFFER,
g_vertex_buffer_data.size() * sizeof(sf::Vector3f),
g_vertex_buffer_data[0], GL_STATIC_DRAW);
Another thing: In OpenGL 3.3 Core Profile there is no gl_FragColor variable. You will have to define an out variable.
Next: Your vertex shader seems to be empty. You have to write to gl_Position otherwise nothing will be shown.
Possible error codes for glGetAttribLocation are:
GL_INVALID_OPERATION
Which don't have a fixed value. Try to get the error string with gluErrorString() or take a look in the header to which of those 1282 maps.
• check your shader got compiled without error?
• check your shader got linked without error?
What type have positionId? All object id's must be GLuint type.
And btw allways enable shader compilation-linking error check, and debug will be more informative.
I do that in this way (OpenGL-ES 2.0):
m_nVertexShader = glCreateShader(GL_VERTEX_SHADER);
m_nPixelShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(m_nVertexShader, 1, &lpszVertexBuffer, NULL);
glShaderSource(m_nPixelShader, 1, &lpszFragmentBuffer, NULL);
glCompileShader(m_nVertexShader);
int iIsOk = 0;
glGetShaderiv(m_nVertexShader, GL_COMPILE_STATUS, &iIsOk);
if(!iIsOk)
{
GLint infoLen = 0;
glGetShaderiv(m_nVertexShader, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char*)malloc(sizeof(char) * infoLen);
glGetShaderInfoLog(m_nVertexShader, infoLen, NULL, infoLog);
QMessageBox::warning(this, QString("Error"),
QString(infoLog), QMessageBox::Yes | QMessageBox::Cancel, QMessageBox::Yes);
free(infoLog);
}
glDeleteShader(m_nVertexShader);
return;
}
glCompileShader(m_nPixelShader);
glGetShaderiv(m_nPixelShader, GL_COMPILE_STATUS, &iIsOk);
if(!iIsOk)
{
GLint infoLen = 0;
glGetShaderiv(m_nPixelShader, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char*)malloc(sizeof(char) * infoLen);
glGetShaderInfoLog(m_nPixelShader, infoLen, NULL, infoLog);
QMessageBox::warning(this, QString("Error"),
QString(infoLog), QMessageBox::Yes | QMessageBox::Cancel, QMessageBox::Yes);
free(infoLog);
}
glDeleteShader(m_nPixelShader);
return;
}
m_nProgram = glCreateProgram();
glAttachShader(m_nProgram, m_nVertexShader);
glAttachShader(m_nProgram, m_nPixelShader);
glBindAttribLocation(m_nProgram, 0, "rm_Vertex");
glLinkProgram(m_nProgram);
glGetProgramiv(m_nProgram, GL_LINK_STATUS, &iIsOk);
// Fail to pass status validation
if(!iIsOk)
{
GLint infoLen = 0;
glGetProgramiv(m_nProgram, GL_INFO_LOG_LENGTH, &infoLen);
if(infoLen > 1)
{
char* infoLog = (char*)malloc(sizeof(char) * infoLen);
glGetProgramInfoLog(m_nProgram, infoLen, NULL, infoLog);
QMessageBox::warning(this, QString("Error"),
QString(infoLog), QMessageBox::Yes | QMessageBox::Cancel, QMessageBox::Yes);
free(infoLog);
}
glDeleteProgram(m_nProgram);
return;
}
glUseProgram(m_nProgram);
As you use GLSL 3.3, fist you must specify fragment rendertarget output by calling
glBindFragDataLocation(this->programId, 0, "fragcolor");
Secondly your fragment shader must be like
"#version 330
out vec4 fragcolor;
void main(void) {
fragcolor= vec4(1.0,1.0,1.0,1.0);
}
The example of using this kind of shaders is on OpenGL 3.3 + GLSL 1.5 Sample.