I'm going insane because I can't make a simple set of triangles appear on my screen.
I'm using OpenGL 3 (without the deprecated fixed-function pipeline) through the Derelict bindings for the D programming language.
Can you spot the error in the following program? It compiles just fine and doesn't throw any OpenGL/GLSL error. It just shows a blank screen with the clear color I set.
import std.string;
import std.conv;
import derelict.opengl3.gl3;
import derelict.sdl2.sdl;
immutable string minimalVertexShader = `
#version 120
attribute vec2 position;
void main(void)
{
gl_Position = vec4(position, 0, 1);
}
`;
immutable string minimalFragmentShader = `
#version 120
void main(void)
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
`;
void main() {
DerelictSDL2.load();
DerelictGL3.load();
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
throw new Exception("Failed to initialize SDL: " ~ to!string(SDL_GetError()));
}
// Set OpenGL version
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
// Set OpenGL attributes
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
auto sdlwindow = SDL_CreateWindow("D App",
SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
640, 480, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
if (!sdlwindow)
throw new Exception("Failed to create a SDL window: " ~ to!string(SDL_GetError()));
SDL_GL_CreateContext(sdlwindow);
DerelictGL3.reload();
float[] vertices = [ -1, -1, 1, -1, -1, 1, 1, 1];
ushort[] indices = [0, 1, 2, 3];
uint vbo, ibo;
// Create VBO
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Create IBO
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.sizeof, indices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// Program
auto program = glCreateProgram();
// Vertex Shader
auto vsh = glCreateShader(GL_VERTEX_SHADER);
auto vshSrc = minimalVertexShader.toStringz;
glShaderSource(vsh, 1, &vshSrc, null);
glCompileShader(vsh);
glAttachShader(program, vsh);
// Fragment Shader
auto fsh = glCreateShader(GL_FRAGMENT_SHADER);
auto fshSrc = minimalFragmentShader.toStringz;
glShaderSource(fsh, 1, &fshSrc, null);
glCompileShader(fsh);
glAttachShader(program, fsh);
glLinkProgram(program);
glUseProgram(program);
auto position = glGetAttribLocation(program, "position");
auto run = true;
while (run) {
SDL_Event event;
while (SDL_PollEvent(&event)) {
switch (event.type) {
case SDL_QUIT:
run = false;
default:
break;
}
}
glClearColor(1, 0.9, 0.8, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(position);
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, vertices.sizeof, null);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, null);
glDisableVertexAttribArray(position);
SDL_GL_SwapWindow(sdlwindow);
}
}
On this line:
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, vertices.sizeof, null);
are you sure you want vertices.sizeof, which has a value of 16? In D, a dynamic array is a struct with two members (ptr and length), so .sizeof gives you the size of that struct, not the size of the data it refers to. For the stride you probably want float.sizeof * 2 (the size of one vertex), or simply 0 for tightly packed data.
The same goes for your glBufferData calls, where the size should be vertices.length * float.sizeof (and likewise for the index buffer).
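For example, a minimal sketch of what the corrected calls could look like (tightly packed, two floats per vertex):
// Upload the actual array contents, not the size of the slice struct
glBufferData(GL_ARRAY_BUFFER, vertices.length * float.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * ushort.sizeof, indices.ptr, GL_STATIC_DRAW);
// The stride is the size of one vertex (two floats); 0 would also work for tightly packed data
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, 2 * float.sizeof, null);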
My OpenGL project can't display any objects anymore. I tried to remake everything from scratch, but it still doesn't work.
Main Code
#include <vector>
#include <iostream>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <imgui.h>
#include <imgui_impl_glfw_gl3.h>
#include "Loader.h"
void on_error(int error, const char* description)
{
std::cout << "GLFW error " << error << " : \"" << description << "\"" << std::endl;
}
int main()
{
//Init glfw
glfwSetErrorCallback(on_error);
if (!glfwInit()) return -1;
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//Init window
auto window = glfwCreateWindow(1920, 1080, "gl_Crane", NULL, NULL);
if (!window) { glfwTerminate(); return -1; }
glfwMakeContextCurrent(window);
//Init glew
glewExperimental = true;
if (glewInit() != GLEW_OK) { glfwTerminate(); return -1; }
//Some opengl options
glEnable(GL_DEPTH_TEST);
glEnable(GL_DEBUG_OUTPUT);
glDepthFunc(GL_LESS);
//glEnable(GL_CULL_FACE);
//matrices
std::vector<glm::vec3> vertices = {
{-.2f, -.2f, 0}, {0, .2f, 0}, {.2f, -.2f, 0}
};
std::vector<glm::vec3> colors = {
{1, 0, 0}, {0, 1, 0}, {0, 0, 1}
};
std::vector<GLushort> indexes = {
0, 1, 2
};
//vertexArray
GLuint vertex_array;
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
//vertexbuffer
GLuint vertex_buffer;
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * vertices.size(), vertices.data(), GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//colorbuffer
GLuint color_buffer;
glGenBuffers(1, &color_buffer);
glBindBuffer(GL_ARRAY_BUFFER, color_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * colors.size(), colors.data(), GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//indexbuffer
GLuint index_buffer;
glGenBuffers(1, &index_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLushort) * indexes.size(), indexes.data(), GL_STATIC_DRAW);
glBindVertexArray(0);
//Init shader
auto shader_program = new ShaderProgram;
shader_program->initFromFiles("../Crane/simple.vert", "../Crane/simple.frag");
//shader_program->addUniform("MVP");
ImGui_ImplGlfwGL3_Init(window, true);
glfwSwapInterval(1);
while (!glfwWindowShouldClose(window))
{
ImGui_ImplGlfwGL3_NewFrame();
ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
//maj viewport
int display_w, display_h;
glfwGetFramebufferSize(window, &display_h, &display_w);
glViewport(0, 0, display_w, display_h);
//clear screen
glClearColor(.2f, .2f, .2f, 0);
glClear(GL_COLOR_BUFFER_BIT);
//draw stuff
shader_program->use();
glBindVertexArray(vertex_array);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
//auto mvp = glm::mat4(1);
//glUniformMatrix4fv(shader_program->uniform("MVP"), 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, indexes.size(), GL_UNSIGNED_SHORT, nullptr);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindVertexArray(0);
shader_program->disable();
ImGui::Render();
glfwSwapBuffers(window);
glfwPollEvents();
}
shader_program->disable();
ImGui_ImplGlfwGL3_Shutdown();
glfwTerminate();
return 0;
}
Fragment shader
#version 430
in vec3 fColors;
out vec4 fragColors;
void main()
{
fragColors = vec4(fColors, 1.0);
}
Vertex shader
#version 430
layout (location = 0) in vec4 vertexPosition;
layout (location = 1) in vec3 vertexColor;
out vec3 fColors;
void main()
{
fColors = vertexColor;
gl_Position = vertexPosition;
}
Additionally, I use the shader loader from here:
r3dux shader loader
In your program the depth test is enabled (glEnable(GL_DEPTH_TEST)).
The depth of a fragment is stored in a separate buffer. This buffer has to be cleared too, at the beginning of every frame, just as you clear the color buffer. See glClear:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Of course, if you disabled the depth test, then you would "see" the triangle, too.
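And if depth testing isn't actually needed for this scene, the alternative is simply (a sketch):
// Either remove the glEnable(GL_DEPTH_TEST) call during setup, or disable it explicitly
glDisable(GL_DEPTH_TEST);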
I mean for the following code to draw a horizontal line across the screen. Instead, it draws the line and then draws a line that fades off towards the origin. A picture is posted below.
I think that the critical pieces of code are
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
and
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
The full code is
# include <GL/glew.h>
# include <GLFW/glfw3.h>
const GLchar * vertex_shader_source =
"\
# version 150 core\n\
in vec2 position;\
in vec3 color;\
out vec3 Color;\
void main() { Color = color; gl_Position = vec4(position, 0, 1); }\
";
const GLchar * fragment_shader_source =
"\
# version 150 core\n\
in vec3 Color;\
out vec4 outColor;\
void main() { outColor = vec4(Color, 1.0); }\
";
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
int main (int argc, char ** argv)
{
// ---- INITIALIZE STUFF ---- //
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow * window = glfwCreateWindow(800, 600, "open-gl", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
// ---- MAKE SHADERS ---- //
GLuint vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glCompileShader(vertex_shader);
GLuint fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
glCompileShader(fragment_shader);
GLuint shader_program = glCreateProgram();
glAttachShader(shader_program, vertex_shader);
glAttachShader(shader_program, fragment_shader);
glBindFragDataLocation(shader_program, 0, "outColor");
glLinkProgram(shader_program);
glUseProgram(shader_program);
// ---- MAKE VERTEX BUFFER OBJECT ---- //
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// ---- MAKE VERTEX ARRAY OBJECT ---- //
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLint posAttrib = glGetAttribLocation(shader_program, "position");
GLint colAttrib = glGetAttribLocation(shader_program, "color");
glEnableVertexAttribArray(posAttrib);
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 5*sizeof(float), 0);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 5*sizeof(float), (void*)(2*sizeof(float)));
// ---- DO OTHER THINGS ---- //
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
// ---- CLEAN UP ---- //
glDeleteProgram(shader_program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
glDeleteVertexArrays(1, &vao);
glDeleteBuffers(1, &vbo);
glfwTerminate();
return 0;
}
The output is:
[screenshot: the horizontal line, plus a second line fading off towards the origin]
I have no idea what the problem is; I have searched the internet, but I cannot find anyone who has had a similar problem. The best that I have found is someone who said that OpenGL implementations do not tend to do lines very well. This does not happen with GL_LINES, however.
I am using OpenGL 3.2 with GLFW and GLEW. I have an Acer Aspire v5-571P-6648; I do not know specifically what model of graphics card it has, but I can look for it.
Your last argument to
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
is wrong. sizeof(vertices)/sizeof(float) is 10, the total number of floats in the array, but the count parameter is the number of vertices to draw, and each of your vertices takes 5 floats. It should be 2 (see https://www.opengl.org/sdk/docs/man/html/glDrawArrays.xhtml).
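For instance, one way to keep the count in sync with the vertex data (a sketch, assuming the interleaved layout of 2 position + 3 color floats per vertex stays as it is):
// 5 floats per vertex: 2 for position, 3 for color
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices) / (5 * sizeof(float)));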
I'm following the tutorials from http://www.opengl-tutorial.org and trying a few things by myself.
At the moment my program can create triangles as class objects, transform their size and positions, and animate them (very simple code I just play around with). But when I try to pass color values to the shaders through a buffer, my triangles stop rendering.
I'll post the relevant code here and try to make it understandable; I hope someone can help me out!
CODE:
#include <stdio.h>
#include <stdlib.h>
#include <iomanip>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
#include <glm/gtx/transform.hpp>
using namespace glm;
#include "loadShader.h"
#include "model.h"
int _screenWidth = 1024;
int _screenHeight = 768;
int main()
{
//START GLFW
if (!glfwInit())
{
fprintf(stderr, "Failed to initialize GLFW\n");
return -1;
}
//GLFW SETTINGS
glfwWindowHint(GLFW_SAMPLES, 4); //4x antialiasing
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); //OPENGL 3.3
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); //Mac compatible?
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//open window
GLFWwindow* window;
window = glfwCreateWindow(_screenWidth, _screenHeight, "Tutorial 01", NULL, NULL);
if (window == NULL) {
fprintf(stderr, "Failed to open GLFW window. Make sure your GPU is openGL 3.3 compatible!\n");
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
//INITIALIZE GLEW
glewExperimental = true; //needed in core profile
if (glewInit() != GLEW_OK) {
fprintf(stderr, "Failed to initialize GLEW.\n");
return -1;
}
// Ensure we can capture the escape key being pressed below
glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
glClearColor(0.125f, 0.0f, 0.3725f, 0.0f);
//Enable Depth test
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
// Create and compile GLSL program from the shaders
GLuint programID = LoadShaders("Shaders/vertexShaders.vert", "Shaders/fragmentShaders.frag");
//get handle for MVP uniform
GLuint MatrixID = glGetUniformLocation(programID, "MVP");
/////////////////////
//MODEL//////////////
/////////////////////
//two triangles
int nVertices = 6;
//created through model class
model object1, object2;
object1.createTriangle();
object2.createTriangle();
//initialize buffer data arrays
GLfloat g_vertex_buffer_data[12*3];
// One color for each vertex
static const GLfloat g_color_buffer_data[] = {
0.583f, 0.771f, 0.014f,
0.609f, 0.115f, 0.436f,
0.327f, 0.483f, 0.844f,
0.822f, 0.569f, 0.201f,
0.435f, 0.602f, 0.223f,
0.310f, 0.747f, 0.185f };
//////////////////////////
//////////////////////////
//////////////////////////
//CREATE BUFFER
//This will identify our vertex and color buffer
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
GLuint colorbuffer;
glGenBuffers(1, &colorbuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
//counters
float time = 0.0f;
int counter = 0;
int counterStep = 100;
do {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
time = time + 0.01;
counter = counter + 1;
//TRANSFORM MY TRIANGLES (its working)
glm::vec3 rotationAxis(0.0f, 0.0f, 1.0f);
glm::vec3 translation(0.0f, 0.0f, 0.025f);
float rotationAngle = 0.25f;
object1.rotate(rotationAngle, rotationAxis);
//object2.rotate(0.5*rotationAngle, rotationAxis);
object1.translate(translation);
//Update coordinates in vertex buffer (both triangles)
for (int i = 0; i < 3; i++)
{
g_vertex_buffer_data[i * 3] = object1.transformedPosition[i].x;
g_vertex_buffer_data[i * 3 + 1] = object1.transformedPosition[i].y;
g_vertex_buffer_data[i * 3 + 2] = object1.transformedPosition[i].z;
}
for (int i = 0; i < 3; i++)
{
g_vertex_buffer_data[i * 3 + 9] = object2.transformedPosition[i].x;
g_vertex_buffer_data[i * 3 + 10] = object2.transformedPosition[i].y;
g_vertex_buffer_data[i * 3 + 11] = object2.transformedPosition[i].z;
}
//Model matrix
glm::mat4 modelM = glm::mat4(1.0f);
//Projection matrix:
glm::mat4 projectionM = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 100.0f);
//Camera matrix:
glm::mat4 viewM = lookAt(
glm::vec3(8, 2, 2+10*time),
glm::vec3(0, 0, 0),
glm::vec3(0, 1, 0));
//MODEL VIEW PROJECTION MATRIX:
glm::mat4 mvpM = projectionM * viewM * modelM;
//Give our vertices and colors to OpenGL
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
/////////////////////////////////////////////
//USE SHADERS
glUseProgram(programID);
//Send our transformation to the currently bound shader, MVP uniform
glUniformMatrix4fv(MatrixID, 1, 0, &mvpM[0][0]);
//1rst attribute buffer: vertices
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, //attribute 0, no particular reason, but must match the layout in the shader
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
(void*)0 //array buffer offset
);
//2nd attribute buffer: colors
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glVertexAttribPointer(
1, //attribute number
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
(void*)0 //array buffer offset
);
//Draw the triangle
glDrawArrays(GL_TRIANGLES, 0, nVertices); //Starting from vertex 0; 3 vertices total
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
// Swap buffers
glfwSwapBuffers(window);
glfwPollEvents();
}
// Check if the ESC key was pressed or the window was closed
while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
glfwWindowShouldClose(window) == 0);
// Cleanup VBO
glDeleteBuffers(1, &vertexbuffer);
glDeleteBuffers(1, &colorbuffer);
glDeleteProgram(programID);
glDeleteVertexArrays(1, &VertexArrayID);
// Close OpenGL window and terminate GLFW
glfwTerminate();
return 0;
}
Vertex shader:
#version 330 core
layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec3 vertexColor;
uniform mat4 MVP;
out vec3 fragmentColor;
void main(){
gl_Position = MVP * vec4(vertexPosition_modelspace,1.0);
fragmentColor = vertexColor;
}
Fragment shader:
#version 330 core
in vec3 fragmentColor;
out vec3 color;
void main(){
color = fragmentColor;
}
If I remove:
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
and:
//2nd attribute buffer: colors
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glVertexAttribPointer(
1, //attribute number
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
(void*)0 //array buffer offset
);
from the main function, the triangles renders.
EDIT
Here is only the relevant code, which is where I think the problem is:
//FIRST DO THIS: (but not sure why..)
GLuint VertexArrayID;
glGenVertexArrays(1, &VertexArrayID);
glBindVertexArray(VertexArrayID);
//initialize buffer data arrays
GLfloat g_vertex_buffer_data[] = { something };
GLfloat g_color_buffer_data[] = { something };
//CREATE BUFFER
GLuint vertexbuffer;
glGenBuffers(1, &vertexbuffer);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
GLuint colorbuffer;
glGenBuffers(1, &colorbuffer);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
//LOOP
do {
//UPDATE BUFFER DATA
GLfloat g_vertex_buffer_data[] = { something new };
GLfloat g_color_buffer_data[] = { something new };
//SEND NEW BUFFER DATA TO SHADERS
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
//USE SHADERS
glUseProgram(programID);
//1rst attribute buffer: vertices positions
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glVertexAttribPointer(
0, //attribute 0, no particular reason, but must match the layout in the shader
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
(void*)0 //array buffer offset
);
//2nd attribute buffer: colors
glEnableVertexAttribArray(1);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glVertexAttribPointer(
1, //attribute number
3, //size
GL_FLOAT, //type
GL_FALSE, //normalized?
0, //stride
(void*)0 //array buffer offset
);
//Draw the triangles
glDrawArrays(GL_TRIANGLES, 0, nVertices); //Starting from vertex 0; 3 vertices total
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
// Swap buffers
glfwSwapBuffers(window);
glfwPollEvents();
}
// Check if the ESC key was pressed or the window was closed
while (glfwGetKey(window, GLFW_KEY_ESCAPE) != GLFW_PRESS &&
glfwWindowShouldClose(window) == 0);
// Cleanup VBO
glDeleteBuffers(1, &vertexbuffer);
glDeleteBuffers(1, &colorbuffer);
glDeleteProgram(programID);
glDeleteVertexArrays(1, &VertexArrayID);
// Close OpenGL window and terminate GLFW
glfwTerminate();
return 0;
}
SOLUTION
I sort of solved my problem by looking at this tutorial: https://www.opengl.org/wiki/Tutorial2%3a_VAOs,_VBOs,_Vertex_and_Fragment_Shaders_%28C_/_SDL%29
Basically, I just moved the buffer binding and glBufferData calls outside the loop. Somehow that was OK before with the vertex positions, but not with the vertex colors...
I read and learned more, changed some of the code, and it's working now; I'm happy with it :)
Before you call glBufferData, you have to bind the buffer you want to send data to.
It should be:
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
Your code did not send any data to the vertex position buffer.
P.S. For efficiency, you should generate the buffers and submit any static data before the loop.
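For example, a minimal sketch of that restructuring for this program (assuming the positions are animated every frame while the colors stay constant, as in the posted code):
// Before the loop: upload the static colors once and allocate the vertex buffer once
glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_color_buffer_data), g_color_buffer_data, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), nullptr, GL_DYNAMIC_DRAW);
// Inside the loop: re-upload only the vertex positions that changed this frame
glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(g_vertex_buffer_data), g_vertex_buffer_data);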
I have a simple D application using DerelictGL3 and DerelictSDL2. I am trying to render a red triangle using vertex buffer objects; however, whenever I call glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null), glGetError() returns 1282 (Invalid Operation). The code is as follows...
app.d
import std.string;
import std.conv;
import std.stdio;
import std.file;
import std.c.stdlib;
import derelict.opengl3.gl3;
import derelict.sdl2.sdl;
void main() {
DerelictSDL2.load();
DerelictGL3.load();
assert(SDL_Init(SDL_INIT_VIDEO) >= 0);
auto vertex_shader = (cast(string) read("shaders/minimal.vert")).toStringz;
auto fragment_shader = (cast(string) read("shaders/minimal.frag")).toStringz;
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
auto final_param = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN;
auto window = SDL_CreateWindow("Triangle", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 640, 480, final_param);
assert(window);
SDL_GL_CreateContext(window);
DerelictGL3.reload();
GLfloat[] vertices = [1, 1, 1, -1, -1, 1, -0.8, -1, 1];
uint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vertices.length * float.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
auto program = glCreateProgram();
auto vshader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vshader, 1, &vertex_shader, null);
glCompileShader(vshader);
glAttachShader(program, vshader);
auto fshader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fshader, 1, &fragment_shader, null);
glCompileShader(fshader);
glAttachShader(program, fshader);
glLinkProgram(program);
glUseProgram(program);
auto position = glGetAttribLocation(program, "position");
auto close = false;
check();
while(!close) {
SDL_Event event;
while(SDL_PollEvent(&event)) {
if(event.type == SDL_QUIT) close = true;
}
glClearColor(1, 0.4, 0.4, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(position);
check("before"); // Does not return error.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null); // <- Causing Invalid Operation GL error.
check("after"); // Returns error.
glDrawArrays(GL_TRIANGLES, 0, 3);
SDL_GL_SwapWindow(window);
}
}
void check(string msg="") {
auto error = glGetError();
assert(error == 0, msg ~ ":" ~ to!string(error));
}
shaders/minimal.vert
#version 120
attribute vec2 position;
void main(void) {
gl_Position = vec4(position, 0, 1);
}
shaders/minimal.frag
#version 120
void main(void) {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
When ignoring the error, a red rectangle is rendered in the top right corner of the screen.
You're missing a VAO, which is required in an OpenGL 3.2 core profile context. You can create it before your other buffers like this:
GLuint vao;
glGenVertexArrays(1,&vao);
glBindVertexArray(vao);
What might also help is enabling OpenGL debug logging; you can do it like this: https://github.com/d-gamedev-team/gfm/blob/master/opengl/gfm/opengl/opengl.d#L419
Some drivers, like NVIDIA's, provide extensive warnings when something goes wrong, and the problem might not be where you think it is.
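For instance, in the question's app.d the fix could slot in right after the context is created (a minimal sketch):
SDL_GL_CreateContext(window);
DerelictGL3.reload();
// A core-profile context needs a VAO bound before vertex attributes are set up
uint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);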
I'm trying to render a simple test shader with the OpenGL 3.3 core profile, but all I get is a black window.
GLFWwindow* window;
GLuint vao;
GLuint vbo[2];
GLuint program;
const GLfloat square[8] = {
-1.0, -1.0,
-1.0, 1.0,
1.0, 1.0,
1.0, -1.0
};
const GLfloat indices[4] = { 0, 1, 2, 3 };
init opengl core context and window
if( !glfwInit() ) {
std::cerr << "Failed to initialize GLFW\n";
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Open a window and create its OpenGL context
window = glfwCreateWindow( 1024, 768, "", 0, 0);
if( window == NULL ) {
std::cerr << "Failed to open GLFW window.\n";
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLEW
if (gl3wInit()) {
std::cerr << "Failed to initialize GLEW" << std::endl;
return -1;
}
if (!gl3wIsSupported(3, 3)) {
std::cerr << "OpenGL Version 3.3 not supported" << std::endl;
return -1;
}
init vbo and its index buffer, then vao, and the shader program, bind the vertex shader input to 0
glGenBuffers(2, vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glBufferData(GL_ARRAY_BUFFER, 8*sizeof(GLfloat), square, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ARRAY_BUFFER, 4*sizeof(GLushort), indices, GL_STATIC_DRAW);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
program = glCreateProgram();
GLuint vertex_shader, fragment_shader;
loadShader_FILE(vertex_shader, "shader/default.vsh", GL_VERTEX_SHADER);
glAttachShader(program, vertex_shader);
loadShader_FILE(fragment_shader, "shader/default.fsh", GL_FRAGMENT_SHADER);
glAttachShader(program, fragment_shader);
glBindAttribLocation(program, 0, "pos");
glLinkProgram(program);
start rendering
glUseProgram(program);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glVertexAttribPointer(
0,
2,
GL_FLOAT,
GL_FALSE,
sizeof(GLfloat)*2,
(void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[1]);
glDrawElements(
GL_TRIANGLE_STRIP,
4,
GL_UNSIGNED_SHORT,
(void*)0);
glDisableVertexAttribArray(0);
glfwSwapBuffers(window);
vertex shader
#version 330
in vec2 pos;
out vec2 c;
void main(void)
{
gl_Position = vec4(pos, 0.0, 1.0);
c = (pos+1)*0.5;
}
fragment shader
#version 330
in vec2 c;
out vec4 color;
void main(void)
{
color = vec4(c, 1, 1);
}
The shaders compile without errors, and apitrace can't find any OpenGL errors.
Well, this cannot be good:
const GLfloat indices[4] = { 0, 1, 2, 3 };
You told OpenGL that those were unsigned shorts, but they are floating-point. Never mind the fact that GLfloat is twice the size of GLushort; the way those numbers are represented is very different. Floating-point vertex indices do not make a whole lot of sense.
Instead, you should use:
const GLushort indices[4] = { 0, 1, 2, 3 };