So I've coded a model loader in OpenGL, and it does in fact load models, but for some reason I can't see the resulting triangle on the screen. Take a look for yourself:
#define GLEW_STATIC
#include <GL\glew.h> // Graphics Libraries
#include <GLFW\glfw3.h>
#include <iostream>
#include <fstream>
#include <vector>
#include <string>
int main()
{
    std::cout << "Please enter the name of the mesh file: ";
    std::string FileName;
    std::cin >> FileName;
    std::cout << "You entered: " << FileName << std::endl;

    if (!glfwInit())
    {
        std::cout << "Failed to initialize GLFW" << std::endl;
        system("Pause");
        return EXIT_FAILURE;
    }
    std::cout << "GLFW 3.0.4 Initialized" << std::endl;

    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    GLFWwindow* Window = glfwCreateWindow(800, 600, "Window", NULL, NULL);
    glfwMakeContextCurrent(Window);
    if (Window == NULL)
    {
        std::cout << "Failed to create an OpenGL 3.3 context" << std::endl;
        system("Pause");
        return EXIT_FAILURE;
    }
    std::cout << "Created an OpenGL 3.3 context" << std::endl;

    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK)
    {
        std::cout << "Failed to initialize GLEW 1.11.0" << std::endl;
        system("Pause");
        return EXIT_FAILURE;
    }

    std::ifstream FileStream(FileName);
    std::vector<float> Vertices;
    if (!FileStream)
    {
        std::cout << "An error was encountered while opening " << FileName << std::endl;
        system("Pause");
        return EXIT_FAILURE;
    }

    float coordinateX, coordinateY, coordinateZ;
    std::string Character;
    while (!FileStream.eof())
    {
        FileStream >> Character;
        if (Character == "v")
        {
            FileStream >> coordinateX >> coordinateY >> coordinateZ;
            std::cout << "Loading in " << coordinateX << " " << coordinateY << " " << coordinateZ << std::endl;
            Vertices.push_back(coordinateX);
            Vertices.push_back(coordinateY);
            Vertices.push_back(coordinateZ);
        }
        else
        {
            std::cout << "Skipping " << Character << std::endl;
            continue;
        }
    }
    std::cout << "Loaded " << FileName << std::endl;

    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);

    unsigned int VBO;
    glGenBuffers(1, &VBO);
    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(float) * Vertices.size(), &Vertices.front(), GL_STATIC_DRAW);

    unsigned int VAO;
    glGenVertexArrays(1, &VAO);
    glBindVertexArray(VAO);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);

    const char* Vertex_Shader =
        "#version 330\n"
        "in vec3 vp;"
        "void main () {"
        " gl_Position = vec4 (vp, 1.0);"
        "}";
    const char* Fragment_Shader =
        "#version 330\n"
        "out vec4 frag_colour;"
        "void main () {"
        " frag_colour = vec4 (0.5, 0.0, 0.5, 1.0);"
        "}";

    unsigned int VertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(VertexShader, 1, &Vertex_Shader, NULL);
    glCompileShader(VertexShader);
    unsigned int FragmentShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(FragmentShader, 1, &Fragment_Shader, NULL);
    glCompileShader(FragmentShader);

    unsigned int Shader_Program = glCreateProgram();
    glAttachShader(Shader_Program, FragmentShader);
    glAttachShader(Shader_Program, VertexShader);
    glLinkProgram(Shader_Program);

    while (!glfwWindowShouldClose(Window))
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glUseProgram(Shader_Program);
        glBindVertexArray(VAO);
        glDrawArrays(GL_TRIANGLES, 0, Vertices.size());
        glfwPollEvents();
        glfwSwapBuffers(Window);
    }
    return EXIT_SUCCESS;
}
And the loaded file is as follows:
v 0.0 0.5 0.0
v 0.5 -0.5 0.0
v -0.5 -0.5 0.0
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
const char* Vertex_Shader =
    "#version 330\n"
    "in vec3 vp;"
    "void main () {"
    " gl_Position = vec4 (vp, 1.0);"
    "}";
There's no guarantee that attributes will be allocated attribute slots in declaration order.
You need to tell OpenGL (via a location qualifier or glBindAttribLocation() pre-link) where to put it or ask (glGetAttribLocation() post-link) where it was put.
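A minimal sketch of both options, reusing the vp attribute and the Shader_Program handle from the question:

// Option 1: choose the slot yourself, before linking
glBindAttribLocation(Shader_Program, 0, "vp");
glLinkProgram(Shader_Program);

// Option 2: link first, then ask where the attribute ended up
glLinkProgram(Shader_Program);
GLint vpLoc = glGetAttribLocation(Shader_Program, "vp");
glEnableVertexAttribArray(vpLoc);
glVertexAttribPointer(vpLoc, 3, GL_FLOAT, GL_FALSE, 0, 0);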
There are at least 3 problems in this code:
It creates two vertex shader objects, and no fragment shader object. Replace:
unsigned int FragmentShader = glCreateShader(GL_VERTEX_SHADER);
by:
unsigned int FragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
The third argument to glDrawArrays() must be the number of vertices, while this code passes in the total number of coordinates. Since 3 coordinates per vertex are used here, change the call to:
glDrawArrays(GL_TRIANGLES, 0, Vertices.size() / 3);
The location of the vertex attribute is not specified. This might work by "accident", particularly as long as there is only one attribute. But it's much safer to specify the location. The easiest way to do this is to add the location to the declaration of vp in the vertex shader:
"layout(location = 0) in vec3 vp;"
You are binding your GL_ARRAY_BUFFER wrong
You have a bug in your shader creation
Your shaders are wrong
1) Simply bind (and create) your GL_ARRAY_BUFFER after your glBindVertexArray call.
2) As already said in another answer, your fragment shader creation is wrong and should read: unsigned int FragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
3) You are taking it for granted that you will get the right attribute locations in your shaders; this is bad practice. You should be explicit and use a layout similar to this: layout(location = X) in vec3 myVec;
Also, a few tips:
There's no need to re-bind your VAO within the loop; just bind it once before while (!glfwWindowShouldClose(Window)). The same goes for your shader program.
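For instance, a sketch of the question's render loop with both moved out (and with the corrected draw count from the other answer):

glUseProgram(Shader_Program);
glBindVertexArray(VAO);
while (!glfwWindowShouldClose(Window))
{
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glDrawArrays(GL_TRIANGLES, 0, Vertices.size() / 3);
    glfwPollEvents();
    glfwSwapBuffers(Window);
}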
You should be checking that your shaders compiled correctly and that your program linked correctly! Have a look at glGetShaderiv and glGetProgramiv with GL_COMPILE_STATUS and GL_LINK_STATUS respectively, and at glGetShaderInfoLog and glGetProgramInfoLog if you don't get GL_TRUE for either status.
This will give you a message telling you the exact problem encountered when compiling/linking, so you can debug your shader code easily without having to guess.
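A rough sketch of such a check, using the handles from the question:

GLint status = GL_FALSE;
glGetShaderiv(VertexShader, GL_COMPILE_STATUS, &status);
if (status != GL_TRUE)
{
    char log[1024];
    glGetShaderInfoLog(VertexShader, sizeof(log), NULL, log);
    std::cout << "Vertex shader compile error: " << log << std::endl;
}
// ... same for FragmentShader, then after glLinkProgram:
glGetProgramiv(Shader_Program, GL_LINK_STATUS, &status);
if (status != GL_TRUE)
{
    char log[1024];
    glGetProgramInfoLog(Shader_Program, sizeof(log), NULL, log);
    std::cout << "Program link error: " << log << std::endl;
}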
So I've been starting with OpenGL by following this tutorial and managed to get everything to draw fine, but when I resize my viewport, the vao, vbo and shader program are unbound. I need to rebind everything and resend the buffer data. I don't know if this is the expected behaviour, but it seems a bit ridiculous to me, so maybe I'm missing something.
I did some searching and tried a couple of different ways (with/without vaos, using glVertexAttribFormat / glVertexAttribBinding ...) with no progress.
I made a logGl function to get as much information as I could:
void logGl() {
    int value, value2;
    glGetIntegerv(GL_ARRAY_BUFFER_BINDING, &value);
    std::cout << "bound buffer: " << value << std::endl;
    glGetIntegerv(GL_VERTEX_ARRAY_BINDING, &value);
    std::cout << "bound vao: " << value << std::endl;
    glGetIntegerv(GL_CURRENT_PROGRAM, &value);
    std::cout << "bound program: " << value << std::endl;
    glGetIntegerv(GL_MAJOR_VERSION, &value);
    glGetIntegerv(GL_MINOR_VERSION, &value2);
    std::cout << "GL version: " << value << "." << value2 << std::endl;

    float data[9];
    glGetBufferSubData(GL_ARRAY_BUFFER, 0, 9 * sizeof(float), data);
    std::cout << "Buffer Data: ";
    for (size_t i = 0; i < 9; i++) { std::cout << data[i] << " "; }
    std::cout << std::endl;
}
This is the output:
Shader Program ID: 3
bound buffer: 1
bound vao: 1
bound program: 3
GL version: 4.6
Buffer Data: -0.5 -0.5 0 0 0.5 0 0.5 -0.5 0
changing viewport *******
bound buffer: 0
bound vao: 0
bound program: 0
GL version: 4.6
Buffer Data: 1.23555e-31 4.59163e-41 28.875 0 30.0201 0 2.05719e+32 4.55772e-41 1.23552e-31
We see everything is unbound (here I recreate the shader program)
Shader Program ID: 39
Viewport size: 0 0 801 600
In main loop, just before issuing draw call:
bound buffer: 1
bound vao: 0
bound program: 39
GL version: 4.6
Buffer Data: 3.8357e+34 4.55772e-41 3.47159e+34 4.55772e-41 4.56443e+34 4.55772e-41 1.5695e+11 3.09e-41 1.23552e-31
Error: 1282
--------------------------------
The error code 1282 (GL_INVALID_OPERATION) did not really provide me with any clue, and I did not get any compile/link errors from my setShaders function.
This is my main loop; I can provide the implementation of the shader functions if needed, but it's pretty much textbook tutorial code.
int main() {
    // Init SDL, get GL context, set initial viewport, init glew ...
    Systems sys = initWindow(800, 600);

    // Init a buffer
    float vertices[] = {
        -0.5f, -0.5f, 0.0f,
         0.0f,  0.5f, 0.0f,
         0.5f, -0.5f, 0.0f
    };
    unsigned int vbo, vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao); // bind first!
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void *) 0);
    glEnableVertexAttribArray(0);

    unsigned int shader = setShaders("Shaders.txt");
    logGl();

    bool running = true;
    while (running) {
        bool log = false;
        SDL_Event event;
        while (SDL_PollEvent(&event)) {
            switch (event.type) {
            case SDL_QUIT:
                running = false;
            case SDL_KEYDOWN:
                if (event.key.keysym.sym == SDLK_ESCAPE) running = false;
            case SDL_WINDOWEVENT:
                switch (event.window.event) {
                case SDL_WINDOWEVENT_RESIZED:
                    int dims[4];
                    std::cout << "changing viewport ******* \n";
                    glViewport(0, 0, event.window.data1, event.window.data2);
                    // glViewport(0, 0, 500, 500);
                    logGl();
                    std::cout << "We see everything is unbound" << std::endl;
                    // Updating viewport unbinds vao, shader program
                    // and seems to empty vbo data
                    // since I get this error code after viewport call at glDraw:
                    shader = setShaders("Shaders.txt");
                    glUseProgram(shader);
                    glGetIntegerv(GL_VIEWPORT, dims);
                    std::cout << "Viewport size: "
                              << dims[0] << " "
                              << dims[1] << " "
                              << dims[2] << " "
                              << dims[3] << std::endl;
                    log = true;
                }
            }
        }

        // random bg color
        Uint32 rdCol = SDL_GetPerformanceCounter();
        glClearColor((float) (rdCol & 0x0000FF00) / 0x0000FF00,
                     (float) (rdCol & 0x00FF0000) / 0x00FF0000,
                     (float) (rdCol & 0xFF000000) / 0xFF000000, 1);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        // Why is this not needed ..???
        glBindVertexArray(vao);
        ///////////////////////////////////////////////////////////////////////
        // *** The four lines below are necessary to keep drawing on win resize
        glBindBuffer(GL_ARRAY_BUFFER, vbo);
        // glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
        //
        // glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void *) 0);
        // glEnableVertexAttribArray(0);
        ///////////////////////////////////////////////////////////////////////
        if (log) {
            std::cout << "In main loop, just before issuing draw call:" << std::endl;
            logGl();
            std::cout << "Error: " << glGetError() << std::endl;
            std::cout << "--------------------------------" << std::endl;
        }
        glDrawArrays(GL_TRIANGLES, 0, 3);

        /* 'flip' Window */
        SDL_GL_SwapWindow(sys.window);
    }

    SDL_FreeSurface(sys.winSurf);
    SDL_DestroyWindow(sys.window);
    SDL_Quit();
    return 0;
}
So the big question is why everything gets unbound when I call glViewport; I would expect it to keep drawing with only the vao and vbo bound (which works until I resize the window).
I have found the culprit: when resizing the window, SDL creates a new OpenGL context and thus removes every binding made. So I could either rebind everything (ridiculous, but it works) or use GLFW, SFML or other libraries.
I found my answer here, if anyone ever encounters this problem.
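For what it's worth, a rough sketch of the "rebind everything" route inside the resize handler, following the workaround lines already commented in the question's main loop (assuming the same vao, vbo, vertices and setShaders):

case SDL_WINDOWEVENT_RESIZED:
    glViewport(0, 0, event.window.data1, event.window.data2);
    // The fresh context has none of the old state, so rebind and re-upload:
    glBindVertexArray(vao);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void *) 0);
    glEnableVertexAttribArray(0);
    shader = setShaders("Shaders.txt"); // recreate the program as well
    glUseProgram(shader);
    break;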
I am trying to use a combination of SFML and OpenGL for a project, but I am having trouble rendering to a sf::RenderTexture. Specifically, if I try to draw while the RenderTexture is active, I crash. (Looks like a null pointer dereference inside glDrawElements.)
Rendering directly to the window works fine. And if I manually create a framebuffer through OpenGL myself, that also works fine. But I'd like to be able to use RenderTexture if possible to simplify a lot of the code.
I may be doing something stupid, but I'm still new to OpenGL so I'm not sure. (Especially with the mixture of SFML and OpenGL, it seems like a lot of stuff can break if you don't manage the context switching correctly.) I'm not seeing any warnings from OpenGL or SFML.
The following reproduces the issue I'm seeing (Windows 10, Visual Studio 2017, OpenGL 4.5, GLEW 2.1.0, SFML 2.4.0):
#include <iostream>
#include <string>
#include <SFML/Window.hpp>
#include <SFML/Graphics.hpp>
#define GL_GLEXT_PROTOTYPES
#include <GL/glew.h>
#include <SFML/OpenGL.hpp>
GLenum glCheckError_(const char *file, int line)
{
    GLenum errorCode;
    while ((errorCode = glGetError()) != GL_NO_ERROR)
    {
        std::string error;
        switch (errorCode)
        {
        case GL_INVALID_ENUM:                  error = "INVALID_ENUM"; break;
        case GL_INVALID_VALUE:                 error = "INVALID_VALUE"; break;
        case GL_INVALID_OPERATION:             error = "INVALID_OPERATION"; break;
        case GL_STACK_OVERFLOW:                error = "STACK_OVERFLOW"; break;
        case GL_STACK_UNDERFLOW:               error = "STACK_UNDERFLOW"; break;
        case GL_OUT_OF_MEMORY:                 error = "OUT_OF_MEMORY"; break;
        case GL_INVALID_FRAMEBUFFER_OPERATION: error = "INVALID_FRAMEBUFFER_OPERATION"; break;
        }
        std::cerr << error << " | " << file << " (" << line << ")" << std::endl;
    }
    return errorCode;
}
#define glCheckError() glCheckError_(__FILE__, __LINE__)
int main()
{
    sf::RenderWindow window(sf::VideoMode(800, 600, 32), "test");
    glewInit();
    std::cout << "Using OpenGL " << window.getSettings().majorVersion << "." << window.getSettings().minorVersion << std::endl;
    //std::cout << "Available GL extensions: " << glGetString(GL_EXTENSIONS) << std::endl;

    sf::Shader shader;
    { // Shader
        const char* vs = R"(
            #version 330 core
            layout (location = 0) in vec3 pos;
            void main()
            {
                gl_Position = vec4(pos, 1.0);
            }
        )";
        const char* fs = R"(
            #version 330 core
            out vec4 color;
            void main()
            {
                color = vec4(0.3, 0.8, 0.2, 1.0);
            }
        )";
        shader.loadFromMemory(vs, fs);
    }

    unsigned int vao;
    { // Mesh
        float vertices[] = {
             0.3f,  0.5f,  1.0f, // top right
             0.5f, -0.5f, -0.5f, // bottom right
            -0.5f, -0.5f, -1.0f, // bottom left
            -0.3f,  0.5f,  0.5f, // top left
        };
        unsigned int indices[] = {
            0, 3, 1, // first triangle
            1, 3, 2, // second triangle
        };
        unsigned int vbo, ebo;
        glGenVertexArrays(1, &vao);
        glCheckError();
        glGenBuffers(1, &vbo);
        glCheckError();
        glGenBuffers(1, &ebo);
        glCheckError();
        glBindVertexArray(vao);
        glCheckError();
        glBindBuffer(GL_ARRAY_BUFFER, vbo);
        glCheckError();
        glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
        glCheckError();
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ebo);
        glCheckError();
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
        glCheckError();
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
        glCheckError();
        glEnableVertexAttribArray(0);
        glCheckError();
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        glCheckError();
        glBindVertexArray(0);
        glCheckError();
    }

    sf::RenderTexture texture;
    sf::Sprite sprite;
    { // Render Texture
        if (!texture.create(800, 600, true)) {
            std::cerr << "Failed to create RenderTexture" << std::endl;
        }
        sprite.setTexture(texture.getTexture());
    }

    int frame = 0;
    while (window.isOpen())
    {
        ++frame;
        sf::Event event;
        while (window.pollEvent(event))
        {
            if (event.type == sf::Event::Closed)
            {
                window.close();
            }
        }

        window.clear();
        if (frame > 1)
        {
            window.popGLStates();
        }
        { // Render to screen
            sf::Shader::bind(&shader);
            glBindVertexArray(vao);
            glCheckError();
            glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
            glCheckError();
            glBindVertexArray(0);
            glCheckError();
            sf::Shader::bind(nullptr);
        }
        window.pushGLStates();
        window.display();

        // Press space to continue...
        bool waiting = true;
        while (waiting) {
            while (window.pollEvent(event))
            {
                if (event.type == sf::Event::KeyPressed && event.key.code == sf::Keyboard::Space)
                {
                    waiting = false;
                    break;
                }
            }
        }

        window.clear();
        if (frame > 1)
        {
            window.popGLStates();
        }
        { // Render to texture
            sf::Shader::bind(&shader);
            glBindVertexArray(vao);
            glCheckError();
            texture.pushGLStates();
            if (!texture.setActive(true)) { // TODO Setting the texture as active is causing me to segfault, messing up my state somehow
                std::cerr << "Failed to activate RenderTexture" << std::endl;
            }
            texture.clear();
            texture.popGLStates();
            glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); // <-- Crashes here!
            glCheckError();
            texture.pushGLStates();
            texture.display();
            if (!texture.setActive(false)) {
                std::cerr << "Failed to deactivate RenderTexture" << std::endl;
            }
            texture.popGLStates();
            glBindVertexArray(0);
            glCheckError();
            sf::Shader::bind(nullptr);
        }
        window.pushGLStates();
        window.draw(sprite);
        window.display();
    }
}
Does anyone have any ideas?
EDIT: Well, I've solved the crashing part. sf::RenderTextures have their own GL context, and I guess you can't reuse data between contexts. So I had to generate the texture first and use texture.setActive() before generating the shader and mesh, so that the context has those objects available.
Now I am just getting a black screen. I am able to draw a new sf::RectangleShape to the same RenderTexture, but my GL mesh doesn't seem to be drawing. Still investigating...
In case anyone has the same issue, here are snippets of what I had to change:
// --- initialization ---

// Generate the texture first so its context is available
sf::RenderTexture texture;
sf::Sprite sprite;
{ // Render Texture
    if (!texture.create(800, 600, true)) {
        std::cerr << "Failed to create RenderTexture" << std::endl;
    }
    sprite.setTexture(texture.getTexture());
}

// Generate the rest of the data within the texture's context
sf::Shader shader;
{ // Shader
    if (!texture.setActive(true)) {
        std::cerr << "Failed to activate RenderTexture" << std::endl;
    }
    shader.loadFromMemory(vs, fs);
    if (!texture.setActive(false)) {
        std::cerr << "Failed to deactivate RenderTexture" << std::endl;
    }
}
unsigned int vao;
{ // Mesh
    if (!texture.setActive(true)) {
        std::cerr << "Failed to activate RenderTexture" << std::endl;
    }
    unsigned int vbo, ebo;
    glGenVertexArrays(1, &vao);
    glCheckError();
    glGenBuffers(1, &vbo);
    glCheckError();
    // ...
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glCheckError();
    glBindVertexArray(0);
    glCheckError();
    if (!texture.setActive(false)) {
        std::cerr << "Failed to deactivate RenderTexture" << std::endl;
    }
}

// --- drawing ---

{ // Render to texture
    // Make sure we use the appropriate context for all drawing to texture
    if (!texture.setActive(true)) {
        std::cerr << "Failed to activate RenderTexture" << std::endl;
    }
    texture.clear();
    sf::Shader::bind(&shader);
    glBindVertexArray(vao);
    glCheckError();
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); // TODO rendering nothing here...
    glCheckError();
    glBindVertexArray(0);
    glCheckError();
    sf::Shader::bind(nullptr);
    // Drawing to the texture through SFML works fine
    texture.pushGLStates();
    sf::RectangleShape rect(sf::Vector2f(20, 20));
    rect.setFillColor(sf::Color::Cyan);
    texture.draw(rect);
    texture.popGLStates();
    texture.display();
    if (!texture.setActive(false)) {
        std::cerr << "Failed to deactivate RenderTexture" << std::endl;
    }
}
if (!window.setActive(true)) {
    std::cerr << "Failed to activate window" << std::endl;
}
window.pushGLStates();
window.draw(sprite);
window.display();
EDIT 2: Drawing problems solved, see my answer.
I found the answer! All thanks to "Groogy" in this thread: https://en.sfml-dev.org/forums/index.php?topic=7446.0
I had skimmed that thread earlier, which prompted me to add texture.setView(texture.getDefaultView()); when creating the RenderTexture. However, that was not enough; I instead had to call glViewport with the texture bound. (I assumed that is what sf::View would do under the covers, but apparently that is not the case.)
So my RenderTexture creation now looks like this:
sf::RenderTexture texture;
sf::Sprite sprite;
{ // Render Texture
    if (!texture.create(800, 600, true)) {
        std::cerr << "Failed to create RenderTexture" << std::endl;
    }
    if (!texture.setActive(true)) {
        std::cerr << "Failed to activate texture" << std::endl;
    }
    sprite.setTexture(texture.getTexture());
    glViewport(0, 0, 800, 600); // <-- Required
    glCheckError();
    if (!texture.setActive(false)) {
        std::cerr << "Failed to deactivate texture" << std::endl;
    }
}
And now things are working.
I expect my program to draw a simple red triangle. The vertex and fragment shaders are supposed to be loaded from external files via my loadShader() function, but for some weird reason my loadShader() function is reading non-ASCII characters, such that shader compile errors are generated.
Attempts to convert both of my shader files to an ASCII format following the instructions provided here (using Notepad++) failed, since the outcome is the same -- namely, the shader compile error regarding the non-ASCII characters (see screenshots below) and a white triangle instead of the expected red one (due to the shader not compiling).
Further Troubleshooting Attempts:
(Note: I additionally uploaded my source code to Pastebin for easy line number referencing.)
The critical part of the code goes from lines 14 to 44 -- my loadShader function.
The "tell file size" section starting at line 22 is working properly, as evidenced in the screenshots below, since my debug output (line 25) reports the same byte count as the file size provided by Windows Explorer.
Furthermore, the buffer (in line 28) corresponds exactly to the shader file sizes, as evidenced by the debug output in line 41 (see screenshots).
Lastly, the syntax of my two shaders is correct, since I previously hard-coded them and the result was the desired red triangle rendering.
Screenshot:
Source Code:
// Expected result: Draws a simple red colored triangle to the screen
// Problem to debug: Why does my loadShader function read non-ASCII characters?
#include <glad/glad.h>
#define GLFW_DLL
#include <GLFW\glfw3.h>
#include <cstdio>
#include <iostream>
// TODO: Debug
/* Loads shader text files from a given file name (extension required)
* and returns the shader code as a null terminated string from that file.
*/
const char * loadShader(const char * shaderFileName) {
    FILE * shaderFile{};
    fopen_s(&shaderFile, shaderFileName, "r");
    if (!shaderFile) {
        std::cerr << "ERROR: Cannot open file" << std::endl;
        return "\0";
    }
    // Tell file size
    fseek(shaderFile, 0L, SEEK_END);
    unsigned long shaderFileSize{};
    shaderFileSize = ftell(shaderFile);
    std::cout << "DEBUG: shaderFileSize: " << shaderFileSize << std::endl; // Debug output
    rewind(shaderFile);
    // Read from file
    char * buffer = (char *)malloc(sizeof(char) * (shaderFileSize + 1UL));
    if (!buffer) {
        std::cerr << "ERROR: Failed to allocate memory" << std::endl;
        return "\0";
    }
    int c{};
    int i = 0;
    while ((c = fgetc(shaderFile)) != EOF) {
        buffer[i++] = c;
    }
    // Put '\0' at the end of the buffer (required for OpenGL)
    buffer[shaderFileSize] = '\0';
    std::cout << "DEBUG: buffer: " << buffer << std::endl; // Debug output
    std::cout << "DEBUG: strlen: " << strlen(buffer) << std::endl; // Debug output
    fclose(shaderFile);
    return buffer;
} // end of loadShader()

int main() {
    // Initialize GLFW
    if (!glfwInit()) {
        std::cerr << "ERROR: Failed to initialize GLFW3" << std::endl;
        return -1;
    }
    // Create window
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    GLFWwindow* window = glfwCreateWindow(640, 480, "OpenGL Game", nullptr, nullptr);
    if (!window) {
        std::cerr << "ERROR: Failed to create window with GLFW3" << std::endl;
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);
    // Load all OpenGL function pointers.
    if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress)) {
        std::cerr << "ERROR: Failed to initialize GLAD" << std::endl;
        return -1;
    }
    // Get info from renderer
    const GLubyte* rendererName = glGetString(GL_RENDERER);
    const GLubyte* OpenGLVersionSupported = glGetString(GL_VERSION);
    std::cout << rendererName << std::endl << OpenGLVersionSupported << std::endl;
    // Enable depth
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LESS);
    // Define triangle
    GLfloat points[] = {  0.0f,  0.5f, 0.0f,
                          0.5f, -0.5f, 0.0f,
                         -0.5f, -0.5f, 0.0f };
    // Create buffer object
    GLuint vertexBufferObject = 0;
    glGenBuffers(1, &vertexBufferObject);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
    glBufferData(GL_ARRAY_BUFFER, sizeof(points), points, GL_STATIC_DRAW);
    // Create vertex attribute object
    GLuint vertexAttributeObject = 0;
    glGenVertexArrays(1, &vertexAttributeObject);
    glBindVertexArray(vertexAttributeObject);
    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBufferObject);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);
    // Load shaders
    const char * vertexShaderCode = loadShader("VertexShader.glsl");
    const char * fragmentShaderCode = loadShader("FragmentShader.glsl");
    // Compile shaders
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexShaderCode, nullptr);
    glCompileShader(vertexShader);
    // Check vertex shader for compile errors
    int success = 0;
    char message[512] = "";
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(vertexShader, 512, nullptr, message);
        std::cerr << "ERROR: Failed to compile vertex shader" << std::endl << message;
    }
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentShaderCode, nullptr);
    glCompileShader(fragmentShader);
    // Check fragment shader for compile errors
    success = 0;
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(fragmentShader, 512, nullptr, message);
        // TODO: Specify error type in message
        std::cerr << "ERROR: Failed to compile fragment shader" << std::endl << message;
    }
    // Create shader program and link it
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glLinkProgram(shaderProgram);
    // Check for linking errors
    glGetProgramiv(shaderProgram, GL_LINK_STATUS, &success);
    if (!success) {
        glGetShaderInfoLog(shaderProgram, 512, nullptr, message);
        // TODO: Specify error type in message
        std::cerr << "ERROR: Failed to link shaders" << std::endl << message;
    }
    // Render loop
    while (!glfwWindowShouldClose(window)) {
        // Wipe the drawing surface clear
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        // Use shader program and vertex attribute object
        glUseProgram(shaderProgram);
        glBindVertexArray(vertexAttributeObject);
        // Draw from the currently bound vertex attribute object
        glDrawArrays(GL_TRIANGLES, 0, 3);
        glfwPollEvents();
        glfwSwapBuffers(window);
    }
    // Exit program
    glfwTerminate();
    return 0;
} // end of main()
0xCD is the value the MSVC CRT uses to fill uninitialized memory. What happens is that your file uses \r\n line endings, but you open it in text mode, so the CRT converts them to \n. As a result you read fewer bytes into buffer than the size returned by ftell, the last value of i is less than shaderFileSize, and accordingly you have some uninitialized bytes between the last value written to buffer[i] and the null terminator. Those 0xCD bytes are the "non-ASCII characters" the shader compiler is complaining about.
Instead, replace your code with:
FILE * shaderFile{};
// Open in binary mode so the CRT performs no \r\n -> \n translation
// and the byte count from ftell matches what fread will deliver.
fopen_s(&shaderFile, shaderFileName, "rb"); // <-------- HERE !!!!
fseek(shaderFile, 0L, SEEK_END);
unsigned long shaderFileSize = ftell(shaderFile);
rewind(shaderFile);
char * buffer = (char *)malloc(shaderFileSize + 1);
fread(buffer, shaderFileSize, 1, shaderFile); // one read of the whole file
buffer[shaderFileSize] = '\0';
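And a hedged usage sketch on the caller's side: the buffer comes from malloc, so it can be freed once the shader is compiled (but not on the failure path, where loadShader returns a string literal):

const char * vertexShaderCode = loadShader("VertexShader.glsl");
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderCode, nullptr);
glCompileShader(vertexShader); // the driver keeps its own copy of the source
free((void *)vertexShaderCode); // safe only if loadShader succeeded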
I've been trying to get shaders to work. They don't.
std::cout << "a " << glGetError() <<std::endl; //making sure things work
GLuint vertexShader = 0;
GLuint fragmentShader = 1;
glCreateShader(GL_VERTEX_SHADER);
std::cout <<"b " << glGetError() <<std::endl;
glCreateShader(GL_FRAGMENT_SHADER);
std::cout <<"c " << glGetError() <<std::endl;
glShaderSource(vertexShader, GLsizei(1), (const char**)&VertexShaderSource, NULL);//ERROR
std::cout <<"d " << glGetError() <<std::endl;
glShaderSource(fragmentShader, GLsizei(1), (const char**)&FragmentShaderSource, NULL);//is somehow fine
std::cout <<"e " << glGetError() <<std::endl;
glCompileShader(vertexShader);
std::cout <<"f " << glGetError() <<std::endl;
glCompileShader(fragmentShader);
std::cout <<"g " << glGetError() <<std::endl;'
Now, the output is:
a 0
b 0
c 0
d 1281
e 0
f 1281
g 0
This means that the first glShaderSource call is not working, but for some reason the next glShaderSource works, while doing what I think is exactly the same thing.
What is going on here???
Shaders are:
const std::string VertexShaderSource = "#version 330 core"
    ""
    "layout (location = 0) in vec3 position;"
    ""
    "void main()"
    "{"
    " gl_Position = vec4(position.x, position.y, position.z, 1.0);"
    "}";

const std::string FragmentShaderSource = "#version 330 core"
    ""
    "out vec4 color;"
    ""
    "void main()"
    "{"
    " color = vec4(1.0f, 1.0f, 1.0f, 1.0f);"
    "}";
And if passing a string this way ((const char**)&VertexShaderSource) is wrong, then why does the second call work just fine?
You have to set vertexShader and fragmentShader to the result of glCreateShader instead of hardcoding them to 0 and 1.
GLuint vertexShader;
GLuint fragmentShader;
vertexShader = glCreateShader(GL_VERTEX_SHADER);
std::cout <<"b " << glGetError() <<std::endl;
fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
std::cout <<"c " << glGetError() <<std::endl;
The function glCreateShader returns 0 if the call was not successful.
In your code you hardcode vertexShader to 0, and OpenGL reports an error for glShaderSource(vertexShader, ...) because you pass 0 as the id to glShaderSource.
The 1281 is a GL_INVALID_VALUE error, which is reported if shader is not a value generated by OpenGL (see glShaderSource: Errors).
Besides that, (const char**)&VertexShaderSource is wrong; to get the memory address where a std::string stores its data you have to use .c_str():
How to pass an std::string to glShaderSource?
You're passing the string wrongly; the correct way is VertexShaderSource.c_str().
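Putting the two fixes together, a rough sketch with the question's variables (hedged, since the rest of the setup isn't shown):

GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);

// c_str() returns a pointer to the string's character data;
// &VertexShaderSource is merely the address of the std::string object.
const char* vertexSrc = VertexShaderSource.c_str();
const char* fragmentSrc = FragmentShaderSource.c_str();
glShaderSource(vertexShader, 1, &vertexSrc, NULL);
glShaderSource(fragmentShader, 1, &fragmentSrc, NULL);
glCompileShader(vertexShader);
glCompileShader(fragmentShader);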
I'm starting to learn OpenGL and I decided to use Ubuntu 15.10 on a VirtualBox to do this. I installed the packages mesa-common-dev (gl.h), libglew-dev (glew.h) and libglfw3-dev (glfw3.h) and following this tutorial I came up with this code:
#define GLEW_STATIC
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
using namespace std;
const GLchar* vertexSource =
    "#version 130\n"
    "in vec2 position;"
    "void main() {"
    " gl_Position = vec4(position, 0.0, 1.0);"
    "}";
const GLchar* fragmentSource =
    "#version 130\n"
    "out vec4 outColor;"
    "uniform vec3 triangleColor;"
    "void main() {"
    " outColor = vec4(triangleColor, 1.0);"
    "}";

int main(int argc, char *argv[]) {
    // GLFW initialization
    if (!glfwInit()) {
        cout << "Failed to initialize GLFW." << endl;
        return -1;
    }
    cout << "GLFW initialized." << endl;
    GLFWwindow* window = glfwCreateWindow(800, 600, "OpenGL", nullptr, nullptr);
    glfwMakeContextCurrent(window);
    cout << "Window and context created." << endl;

    // GLEW initialization
    glewExperimental = GL_TRUE;
    if (glewInit() != GLEW_OK) {
        cout << "Failed to initialize GLEW." << endl;
        return -1;
    }
    cout << "GLEW initialized." << endl;

    GLfloat vertices[] = {
         0.0f,  0.5f,
         0.5f, -0.5f,
        -0.5f, -0.5f
    };

    // Create Vertex Array Object
    GLuint vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);
    cout << "VAO created and bound." << endl;

    // Vertex Buffer Object
    GLuint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    cout << "VBO created and bound." << endl;

    // Create and compile the vertex shader
    GLint status;
    GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &status);
    if (!status) {
        // Vertex shader error handling
        char errorLog[512];
        glGetShaderInfoLog(vertexShader, 512, NULL, errorLog);
        cout << errorLog << endl;
        glfwTerminate();
        return -1;
    }
    cout << "Vertex shader created and compiled." << endl;

    // Create and compile the fragment shader
    GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &status);
    if (!status) {
        // Fragment shader error handling
        char errorLog[512];
        glGetShaderInfoLog(fragmentShader, 512, NULL, errorLog);
        cout << errorLog << endl;
        glfwTerminate();
        return -1;
    }
    cout << "Fragment shader created and compiled." << endl;

    // Link the vertex and fragment shader into a shader program
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outColor");
    glLinkProgram(shaderProgram);
    glUseProgram(shaderProgram);
    cout << "Shaders linked." << endl;

    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "position");
    glEnableVertexAttribArray(posAttrib);
    glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);
    cout << "Layout of the vertex data specified." << endl;

    while (!glfwWindowShouldClose(window)) {
        glDrawArrays(GL_TRIANGLES, 0, 3);
        if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
            glfwSetWindowShouldClose(window, GL_TRUE);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    // Prepare to close the application
    glDeleteProgram(shaderProgram);
    glDeleteShader(fragmentShader);
    glDeleteShader(vertexShader);
    glDeleteBuffers(1, &vbo);
    glDeleteBuffers(1, &vao);
    glfwTerminate();
    return 0;
}
I'm compiling it as g++ test.cpp -o test -lglfw -lGLEW -lGL with no errors.
But when I execute the program, it opens the window with a black screen, without rendering the triangle. I tried to execute this code that someone posted in the comments of the tutorial, but I got the same black screen and no polygons rendered. Is the problem in the code? Did I miss something when setting up OpenGL? Are the compile parameters correct?
Since I didn't set a color for either the background or the triangle, both of them were black by default. Thus the triangle was not visible although it was being rendered. Setting up colors for both of them solved the problem.
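For reference, a minimal sketch of the two color settings, assuming the shaderProgram and triangleColor uniform from the question:

// Background: choose a visible clear color and clear each frame
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);

// Triangle: uniforms default to zero, so triangleColor starts out black;
// set it explicitly once the program is in use
glUseProgram(shaderProgram);
GLint colorLoc = glGetUniformLocation(shaderProgram, "triangleColor");
glUniform3f(colorLoc, 1.0f, 0.0f, 0.0f); // red triangle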