glVertexAttribPointer causing Invalid Operation OpenGL error in D

I have a simple D application using DerelictGL3 and DerelictSDL2. I am trying to render a red triangle using vertex buffer objects; however, whenever I call glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null), glGetError() returns 1282 (GL_INVALID_OPERATION). The code is as follows.
app.d
import std.string;
import std.conv;
import std.stdio;
import std.file;
import std.c.stdlib;
import derelict.opengl3.gl3;
import derelict.sdl2.sdl;
void main() {
DerelictSDL2.load();
DerelictGL3.load();
assert(SDL_Init(SDL_INIT_VIDEO) >= 0);
auto vertex_shader = (cast(string) read("shaders/minimal.vert")).toStringz;
auto fragment_shader = (cast(string) read("shaders/minimal.frag")).toStringz;
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
auto final_param = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN;
auto window = SDL_CreateWindow("Triangle", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 640, 480, final_param);
assert(window);
SDL_GL_CreateContext(window);
DerelictGL3.reload();
GLfloat[] vertices = [1, 1, 1, -1, -1, 1, -0.8, -1, 1];
uint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vertices.length * float.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
auto program = glCreateProgram();
auto vshader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vshader, 1, &vertex_shader, null);
glCompileShader(vshader);
glAttachShader(program, vshader);
auto fshader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fshader, 1, &fragment_shader, null);
glCompileShader(fshader);
glAttachShader(program, fshader);
glLinkProgram(program);
glUseProgram(program);
auto position = glGetAttribLocation(program, "position");
auto close = false;
check();
while(!close) {
SDL_Event event;
while(SDL_PollEvent(&event)) {
if(event.type == SDL_QUIT) close = true;
}
glClearColor(1, 0.4, 0.4, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(position);
check("before"); // Does not return error.
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null); // <- Causing Invalid Operation GL error.
check("after"); // Returns error.
glDrawArrays(GL_TRIANGLES, 0, 3);
SDL_GL_SwapWindow(window);
}
}
void check(string msg="") {
auto error = glGetError();
assert(error == 0, msg ~ ":" ~ to!string(error));
}
shaders/minimal.vert
#version 120
attribute vec2 position;
void main(void) {
gl_Position = vec4(position, 0, 1);
}
shaders/minimal.frag
#version 120
void main(void) {
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
If I ignore the error, a red rectangle is rendered in the top-right corner of the screen.

You're missing a VAO (vertex array object), which is required in an OpenGL 3.2 core profile. You can create and bind it before setting up your other buffers like this:
GLuint vao;
glGenVertexArrays(1,&vao);
glBindVertexArray(vao);

What might also help is enabling OpenGL debug logging; you can do it like this: https://github.com/d-gamedev-team/gfm/blob/master/opengl/gfm/opengl/opengl.d#L419
Some drivers, like NVIDIA's, provide extensive warnings when something goes wrong, and the problem might not be where you think it is.
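In the question's program, the natural place for the VAO setup is right after DerelictGL3.reload(), before any buffer or attribute calls. A minimal sketch of the placement; the SDL_GL_CONTEXT_PROFILE_MASK line is an extra assumption (it is not in the original code) that explicitly requests a core profile, which is what makes the VAO mandatory:
// optional, next to the other SDL_GL_SetAttribute calls: explicitly request a core profile
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
// ...
// right after DerelictGL3.reload(): create and bind a VAO before touching vertex attributes
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);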

Related

OpenGL program won't display any objects?

My OpenGL project can't display any objects anymore. I tried to remake everything from scratch, but it still doesn't work.
Main Code
#include <vector>
#include <iostream>
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <imgui.h>
#include <imgui_impl_glfw_gl3.h>
#include "Loader.h"
void on_error(int error, const char* description)
{
std::cout << "GLFW error " << error << " : \"" << description << "\"" << std::endl;
}
int main()
{
//Init glfw
glfwSetErrorCallback(on_error);
if (!glfwInit()) return -1;
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
//Init window
auto window = glfwCreateWindow(1920, 1080, "gl_Crane", NULL, NULL);
if (!window) { glfwTerminate(); return -1; }
glfwMakeContextCurrent(window);
//Init glew
glewExperimental = true;
if (glewInit() != GLEW_OK) { glfwTerminate(); return -1; }
//Some opengl options
glEnable(GL_DEPTH_TEST);
glEnable(GL_DEBUG_OUTPUT);
glDepthFunc(GL_LESS);
//glEnable(GL_CULL_FACE);
//matrices
std::vector<glm::vec3> vertices = {
{-.2f, -.2f, 0}, {0, .2f, 0}, {.2f, -.2f, 0}
};
std::vector<glm::vec3> colors = {
{1, 0, 0}, {0, 1, 0}, {0, 0, 1}
};
std::vector<GLushort> indexes = {
0, 1, 2
};
//vertexArray
GLuint vertex_array;
glGenVertexArrays(1, &vertex_array);
glBindVertexArray(vertex_array);
//vertexbuffer
GLuint vertex_buffer;
glGenBuffers(1, &vertex_buffer);
glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * vertices.size(), vertices.data(), GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//colorbuffer
GLuint color_buffer;
glGenBuffers(1, &color_buffer);
glBindBuffer(GL_ARRAY_BUFFER, color_buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(glm::vec3) * colors.size(), colors.data(), GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(glm::vec3), nullptr);
//indexbuffer
GLuint index_buffer;
glGenBuffers(1, &index_buffer);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLushort) * indexes.size(), indexes.data(), GL_STATIC_DRAW);
glBindVertexArray(0);
//Init shader
auto shader_program = new ShaderProgram;
shader_program->initFromFiles("../Crane/simple.vert", "../Crane/simple.frag");
//shader_program->addUniform("MVP");
ImGui_ImplGlfwGL3_Init(window, true);
glfwSwapInterval(1);
while (!glfwWindowShouldClose(window))
{
ImGui_ImplGlfwGL3_NewFrame();
ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
//update viewport
int display_w, display_h;
glfwGetFramebufferSize(window, &display_h, &display_w);
glViewport(0, 0, display_w, display_h);
//clear screen
glClearColor(.2f, .2f, .2f, 0);
glClear(GL_COLOR_BUFFER_BIT);
//draw stuff
shader_program->use();
glBindVertexArray(vertex_array);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
//auto mvp = glm::mat4(1);
//glUniformMatrix4fv(shader_program->uniform("MVP"), 1, GL_FALSE, glm::value_ptr(mvp));
glDrawElements(GL_TRIANGLES, indexes.size(), GL_UNSIGNED_SHORT, nullptr);
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glBindVertexArray(0);
shader_program->disable();
ImGui::Render();
glfwSwapBuffers(window);
glfwPollEvents();
}
shader_program->disable();
ImGui_ImplGlfwGL3_Shutdown();
glfwTerminate();
return 0;
}
Fragment shader
#version 430
in vec3 fColors;
out vec4 fragColors;
void main()
{
fragColors = vec4(fColors, 1.0);
}
Vertex shader
#version 430
layout (location = 0) in vec4 vertexPosition;
layout (location = 1) in vec3 vertexColor;
out vec3 fColors;
void main()
{
fColors = vertexColor;
gl_Position = vertexPosition;
}
Additionally, I use the shader loader from here:
r3dux shader loader
In your program the depth test is enabled (glEnable(GL_DEPTH_TEST)).
The depth of a fragment is stored in a separate buffer. This buffer has to be cleared too, at the beginning of every frame, just as you do with the color buffer. See glClear:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Of course, if you disabled the depth test, you would "see" the triangle too.
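In the question's render loop this just means extending the existing clear call (the glClearColor line is quoted from the question for context):
glClearColor(.2f, .2f, .2f, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);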

OpenGL screen is flickering when enabling DEPTH_TEST

I'm having a problem with my OpenGL application. When I don't have DEPTH_TEST enabled it draws fine (except for the wrong draw order), but if I enable it everything starts to flicker. OpenGL doesn't give any errors. I also didn't forget the glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);. I'm pretty clueless as to what causes my problem. Any help would be appreciated.
main.cpp
#include <iostream>
#include <SDL2\SDL.h>
#include <GL\glew.h>
#include <glm\glm.hpp>
#include <glm\gtc\matrix_transform.hpp>
#include <fstream>
#include <string>
#include "vertexdata.h"
#define WIDTH 800
#define HEIGHT 600
std::string filetobuf(const char*);
int main(int, char**) {
// Init the window with an OpenGL context
SDL_Window *window;
SDL_Init(SDL_INIT_EVERYTHING);
window = SDL_CreateWindow("SDL window", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, WIDTH, HEIGHT, SDL_WINDOW_OPENGL);
SDL_GLContext context = SDL_GL_CreateContext(window);
glewInit();
// Create the shaders
std::string vertexsourceString = filetobuf("tutorial2.vert");
std::string fragmentsourceString = filetobuf("tutorial2.frag");
const char* vertexsource = vertexsourceString.c_str();
const char* fragmentsource = fragmentsourceString.c_str();
GLuint vertexshader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexshader, 1, (const GLchar**)&vertexsource, 0);
glCompileShader(vertexshader);
GLuint fragmentshader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentshader, 1, (const GLchar**)&fragmentsource, 0);
glCompileShader(fragmentshader);
GLuint shaderprogram = glCreateProgram();
glAttachShader(shaderprogram, vertexshader);
glAttachShader(shaderprogram, fragmentshader);
glBindAttribLocation(shaderprogram, 0, "in_Position");
glBindAttribLocation(shaderprogram, 1, "in_Color");
glLinkProgram(shaderprogram);
glUseProgram(shaderprogram);
// Setup rectangles
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint vbo[3];
glGenBuffers(3, vbo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[0]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, INDICES_SIZE, indices, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ARRAY_BUFFER, VERTEX_BUFFER_SIZE, vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo[2]);
glBufferData(GL_ARRAY_BUFFER, COLOR_BUFFER_SIZE, colors, GL_STATIC_DRAW);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 0, 0);
glEnableVertexAttribArray(1);
GLint projectionMatrixLocation = glGetUniformLocation(shaderprogram, "projectionMatrix");
glm::mat4 projectionMatrix = glm::perspective(70.0f, (float)WIDTH / (float)HEIGHT, 0.0f, 100.0f);
glUniformMatrix4fv(projectionMatrixLocation, 1, GL_FALSE, &projectionMatrix[0][0]);
GLint modelMatrixLocation = glGetUniformLocation(shaderprogram, "modelMatrix");
glClearColor(0, 0, 1, 1);
glEnable(GL_DEPTH_TEST);
bool running = true;
while (running) {
SDL_Event e;
while (SDL_PollEvent(&e)) {
if (e.type == SDL_QUIT) {
running = false;
}
}
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glm::mat4 modelMatrix(1);
modelMatrix = glm::translate(modelMatrix, glm::vec3(0, 0, -5));
modelMatrix = glm::rotate(modelMatrix, SDL_GetTicks() / 500.0f, glm::vec3(0, 1, 0));
glUniformMatrix4fv(modelMatrixLocation, 1, GL_FALSE, &modelMatrix[0][0]);
glDrawElements(GL_TRIANGLES, INDICES_SIZE, GL_UNSIGNED_BYTE, 0);
SDL_GL_SwapWindow(window);
}
SDL_GL_DeleteContext(context);
SDL_DestroyWindow(window);
SDL_Quit();
return 0;
}
std::string filetobuf(const char* path)
{
if (path == nullptr) {
std::cout << "No file specified!" << std::endl;
return 0;
}
std::ifstream file(path);
if (!file.is_open()) {
std::cout << "File not found! '" << path << "'" << std::endl;
return 0;
}
std::string source;
std::string line;
while (file.good()) {
std::getline(file, line);
source += line + "\n";
}
file.close();
return source;
}
vertexdata.h
#pragma once
#include <GL\GL.h>
#define INDICES_SIZE 12
#define COLOR_VECTOR_SIZE 3
#define VERTEX_VECTOR_SIZE 3
#define VERTEX_COUNT 8
#define COLOR_BUFFER_SIZE COLOR_VECTOR_SIZE * VERTEX_COUNT * sizeof(GLfloat)
#define VERTEX_BUFFER_SIZE VERTEX_VECTOR_SIZE * VERTEX_COUNT * sizeof(GLfloat)
static GLubyte indices[] = {
0, 1, 2,
2, 3, 0,
4, 5, 6,
6, 7, 4,
};
static GLfloat colors[] = {
1, 0, 0,
1, 0, 0,
1, 0, 0,
1, 0, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0,
0, 1, 0,
};
static GLfloat vertices[] = {
1, 1, 1,
1, -1, 1,
-1, -1, 1,
-1, 1, 1,
1, 1, -1,
1, -1, -1,
-1, -1, -1,
-1, 1, -1,
};
Note: I removed the error checking from the main.cpp file to decrease the file size.
You can't set the near plane to zero. With near == 0 the perspective projection degenerates: after the perspective divide every fragment ends up with the same depth value, so the depth test can no longer order your faces and you get the z-fighting/flickering you see. Put something reasonable, like 0.1f, instead:
glm::mat4 projectionMatrix = glm::perspective(70.0f, (float)WIDTH / (float)HEIGHT, 0.1f, 100.0f);

GL_LINE_STRIP always terminates at the origin

I mean for the following code to draw a horizontal line across the screen. Instead, it draws the line and then draws a line that fades off towards the origin. A picture is posted below.
I think that the critical pieces of code are
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
and
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
The full code is
# include <GL/glew.h>
# include <GLFW/glfw3.h>
const GLchar * vertex_shader_source =
"\
# version 150 core\n\
in vec2 position;\
in vec3 color;\
out vec3 Color;\
void main() { Color = color; gl_Position = vec4(position, 0, 1); }\
";
const GLchar * fragment_shader_source =
"\
# version 150 core\n\
in vec3 Color;\
out vec4 outColor;\
void main() { outColor = vec4(Color, 1.0); }\
";
float vertices[] =
{
-0.5, 0.7, 1, 1, 1,
0.5, 0.7, 1, 1, 1
};
int main (int argc, char ** argv)
{
// ---- INITIALIZE STUFF ---- //
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
GLFWwindow * window = glfwCreateWindow(800, 600, "open-gl", nullptr, nullptr);
glfwMakeContextCurrent(window);
glewExperimental = GL_TRUE;
glewInit();
// ---- MAKE SHADERS ---- //
GLuint vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, 1, &vertex_shader_source, NULL);
glCompileShader(vertex_shader);
GLuint fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, 1, &fragment_shader_source, NULL);
glCompileShader(fragment_shader);
GLuint shader_program = glCreateProgram();
glAttachShader(shader_program, vertex_shader);
glAttachShader(shader_program, fragment_shader);
glBindFragDataLocation(shader_program, 0, "outColor");
glLinkProgram(shader_program);
glUseProgram(shader_program);
// ---- MAKE VERTEX BUFFER OBJECT ---- //
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// ---- MAKE VERTEX ARRAY OBJECT ---- //
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLint posAttrib = glGetAttribLocation(shader_program, "position");
GLint colAttrib = glGetAttribLocation(shader_program, "color");
glEnableVertexAttribArray(posAttrib);
glEnableVertexAttribArray(colAttrib);
glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 5*sizeof(float), 0);
glVertexAttribPointer(colAttrib, 3, GL_FLOAT, GL_FALSE, 5*sizeof(float), (void*)(2*sizeof(float)));
// ---- DO OTHER THINGS ---- //
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
while (not glfwWindowShouldClose(window))
{
glClearColor(0,0,0,1);
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
glfwSwapBuffers(window);
glfwPollEvents();
}
// ---- CLEAN UP ---- //
glDeleteProgram(shader_program);
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
glDeleteVertexArrays(1, &vao);
glDeleteBuffers(1, &vbo);
glfwTerminate();
return 0;
}
I have no idea what the problem is; I have searched the internet, but I cannot find anyone who has had a similar problem. The best that I have found is someone who said that OpenGL implementations do not tend to do lines very well. This does not happen with GL_LINES, however.
I am using OpenGL 3.2 with GLFW and GLEW. I have an Acer Aspire v5-571P-6648; I do not know specifically what model of graphics card it has, but I can look for it.
Your last argument for
glDrawArrays(GL_LINE_STRIP, 0, sizeof(vertices)/sizeof(float));
is wrong; the count parameter is the number of vertices to draw, so it should be 2. sizeof(vertices)/sizeof(float) evaluates to 10 (two vertices times five floats each), so the extra vertices read past the end of your buffer and typically come back as zeros, which is the segment fading toward the origin (see https://www.opengl.org/sdk/docs/man/html/glDrawArrays.xhtml).
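A minimal correction, keeping everything else in the question unchanged:
glDrawArrays(GL_LINE_STRIP, 0, 2);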

opengl 3.3 core profile render fails

I'm trying to render a simple test shader with the OpenGL 3.3 core profile, but all I get is a black window.
GLFWwindow* window;
GLuint vao;
GLuint vbo[2];
GLuint program;
const GLfloat square[8] = {
-1.0, -1.0,
-1.0, 1.0,
1.0, 1.0,
1.0, -1.0
};
const GLfloat indices[4] = { 0, 1, 2, 3 };
init opengl core context and window
if( !glfwInit() ) {
std::cerr << "Failed to initialize GLFW\n";
return -1;
}
glfwWindowHint(GLFW_SAMPLES, 4);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
// Open a window and create its OpenGL context
window = glfwCreateWindow( 1024, 768, "", 0, 0);
if( window == NULL ) {
std::cerr << "Failed to open GLFW window.\n";
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Initialize GLEW
if (gl3wInit()) {
std::cerr << "Failed to initialize GLEW" << std::endl;
return -1;
}
if (!gl3wIsSupported(3, 3)) {
std::cerr << "OpenGL Version 3.3 not supported" << std::endl;
return -1;
}
init vbo and its index buffer, then vao, and the shader program, bind the vertex shader input to 0
glGenBuffers(2, vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glBufferData(GL_ARRAY_BUFFER, 8*sizeof(GLfloat), square, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, vbo[1]);
glBufferData(GL_ARRAY_BUFFER, 4*sizeof(GLushort), indices, GL_STATIC_DRAW);
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
program = glCreateProgram();
GLuint vertex_shader, fragment_shader;
loadShader_FILE(vertex_shader, "shader/default.vsh", GL_VERTEX_SHADER);
glAttachShader(program, vertex_shader);
loadShader_FILE(fragment_shader, "shader/default.fsh", GL_FRAGMENT_SHADER);
glAttachShader(program, fragment_shader);
glBindAttribLocation(program, 0, "pos");
glLinkProgram(program);
start rendering
glUseProgram(program);
glBindBuffer(GL_ARRAY_BUFFER, vbo[0]);
glVertexAttribPointer(
0,
2,
GL_FLOAT,
GL_FALSE,
sizeof(GLfloat)*2,
(void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo[1]);
glDrawElements(
GL_TRIANGLE_STRIP,
4,
GL_UNSIGNED_SHORT,
(void*)0);
glDisableVertexAttribArray(0);
glfwSwapBuffers(window);
vertex shader
#version 330
in vec2 pos;
out vec2 c;
void main(void)
{
gl_Position = vec4(pos, 0.0, 1.0);
c = (pos+1)*0.5;
}
fragment shader
#version 330
in vec2 c;
out vec4 color;
void main(void)
{
color = vec4(c, 1, 1);
}
The shaders compile without errors, and apitrace can't find any OpenGL errors.
Well, this cannot be good:
const GLfloat indices[4] = { 0, 1, 2, 3 };
You told OpenGL that those were unsigned shorts, but they are floating-point. Never mind the fact that GLfloat is twice the size of GLushort (so your glBufferData call, which uploads 4*sizeof(GLushort) bytes, only covers half of that array); the way those numbers are represented is completely different, and floating-point vertex indices do not make a whole lot of sense.
Instead, you should use:
const GLushort indices[4] = { 0, 1, 2, 3 };

Rendering a simple rectangle with OpenGL 3 in D language

I'm going insane because I can't make a simple set of triangles appear on my screen.
I'm using OpenGL 3 (without the deprecated fixed-function pipeline) through the Derelict bindings for the D programming language.
Can you spot the error in the following program? It compiles just fine and doesn't throw any OpenGL/GLSL error. It just shows a blank screen with the clear color I set.
import std.string;
import std.conv;
import derelict.opengl3.gl3;
import derelict.sdl2.sdl2;
immutable string minimalVertexShader = `
#version 120
attribute vec2 position;
void main(void)
{
gl_Position = vec4(position, 0, 1);
}
`;
immutable string minimalFragmentShader = `
#version 120
void main(void)
{
gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}
`;
void main() {
DerelictSDL2.load();
DerelictGL3.load();
if (SDL_Init(SDL_INIT_VIDEO) < 0) {
throw new Exception("Failed to initialize SDL: " ~ to!string(SDL_GetError()));
}
// Set OpenGL version
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
// Set OpenGL attributes
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
auto sdlwindow = SDL_CreateWindow("D App",
SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
640, 480, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
if (!sdlwindow)
throw new Exception("Failed to create a SDL window: " ~ to!string(SDL_GetError()));
SDL_GL_CreateContext(sdlwindow);
DerelictGL3.reload();
float[] vertices = [ -1, -1, 1, -1, -1, 1, 1, 1];
ushort[] indices = [0, 1, 2, 3];
uint vbo, ibo;
// Create VBO
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, vertices.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Create IBO
glGenBuffers(1, &ibo);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.sizeof, indices.ptr, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
// Program
auto program = glCreateProgram();
// Vertex Shader
auto vsh = glCreateShader(GL_VERTEX_SHADER);
auto vshSrc = minimalVertexShader.toStringz;
glShaderSource(vsh, 1, &vshSrc, null);
glCompileShader(vsh);
glAttachShader(program, vsh);
// Fragment Shader
auto fsh = glCreateShader(GL_FRAGMENT_SHADER);
auto fshSrc = minimalFragmentShader.toStringz;
glShaderSource(fsh, 1, &fshSrc, null);
glCompileShader(fsh);
glAttachShader(program, fsh);
glLinkProgram(program);
glUseProgram(program);
auto position = glGetAttribLocation(program, "position");
auto run = true;
while (run) {
SDL_Event event;
while (SDL_PollEvent(&event)) {
switch (event.type) {
case SDL_QUIT:
run = false;
default:
break;
}
}
glClearColor(1, 0.9, 0.8, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glEnableVertexAttribArray(position);
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, vertices.sizeof, null);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, ibo);
glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, null);
glDisableVertexAttribArray(position);
SDL_GL_SwapWindow(sdlwindow);
}
}
On this line:
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, vertices.sizeof, null);
are you sure that you want vertices.sizeof, which has a value of 16? In D, a dynamic array is a struct with two members (ptr and length), so .sizeof gives you the size of that struct, not the size of the data. For the stride you want float.sizeof * 2 (the size of one two-float vertex), or simply 0 for tightly packed data.
And the same goes for your glBufferData calls, where the size should be vertices.length * float.sizeof (and indices.length * ushort.sizeof), not the array's .sizeof.
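For illustration, a small sketch of the corrected calls, reusing the names from the question:
// size = element count times element size, not the .sizeof of the slice struct
glBufferData(GL_ARRAY_BUFFER, vertices.length * float.sizeof, vertices.ptr, GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.length * ushort.sizeof, indices.ptr, GL_STATIC_DRAW);
// stride = size of one two-float vertex; 0 would also work for tightly packed data
glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null);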