Custom OpenGL buffer class doesn't display anything - C++

I wanted to create a class to handle OpenGL buffers like vertex buffer objects or color buffers.
Here is the buffer.h:
#pragma once
#include <GL/glew.h>

class glBuffer
{
public:
    glBuffer(GLenum target);
    void setdata(const void *data, GLenum mode);
    void bind(GLuint index, GLint valuePerVertex, GLenum variableType = GL_FLOAT, GLsizei stride = 0, int offset = 0);
    void unbind();
    GLuint getBufferID() const;
    ~glBuffer();
private:
    bool m_active;
    GLuint m_buffer;
    GLuint m_index;
    GLenum m_target;
};
And buffer.cpp:
#include "buffer.h"
#include <GL/glew.h>
#include <iostream>
glBuffer::glBuffer(GLenum target)
{
m_target = target;
m_active = false;
glGenBuffers(1, &m_buffer);
}
void glBuffer::setdata(const void *data, GLenum mode)
{
glBindBuffer(m_target, m_buffer);
glBufferData(m_target, sizeof(data), data, mode);
glBindBuffer(m_target, 0);
}
void glBuffer::bind(GLuint index, GLint valuePerVertex, GLenum variableType, GLsizei stride, int offset)
{
m_active = true;
m_index = index;
glEnableVertexAttribArray(m_index);
glBindBuffer(m_target, m_buffer);
glVertexAttribPointer(
m_index,
valuePerVertex,
variableType,
GL_FALSE, //normalized?
stride,
(void*)offset //buffer offset
);
}
void glBuffer::unbind()
{
m_active = false;
glBindBuffer(m_target, 0);
glDisableVertexAttribArray(m_index);
}
GLuint glBuffer::getBufferID() const
{
return m_buffer;
}
glBuffer::~glBuffer()
{
if (!m_active){
unbind();
}
glDeleteBuffers(1, &m_buffer);
}
Here is how I use it in my application, where I #include "buffer.h":
glBuffer vbo(GL_ARRAY_BUFFER);
vbo.setdata(color_buffer_data, GL_STATIC_DRAW);
vbo.bind(0, 3);
Replaces:
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(
    0,        // attribute 0. No particular reason for 0, but must match the layout in the shader.
    3,        // size
    GL_FLOAT, // type
    GL_FALSE, // normalized?
    0,        // stride
    (void*)0  // array buffer offset
);
When I compile and run it, I get a black window with nothing drawn.
What is happening?
PS: I'm using Visual Studio, GLFW3, and GLEW.

Here is your working code for setting the buffer data
glBufferData(GL_ARRAY_BUFFER, sizeof(vertex_buffer_data), vertex_buffer_data, GL_STATIC_DRAW);
I am assuming that your vertex_buffer_data is an array, which is why this works. Once it has been converted to a void*, you cannot simply call sizeof on the pointer. What you need is the size of the entire array in bytes.
Here is your function in your class that does not work
void glBuffer::setdata(const void *data, GLenum mode)
{
    glBindBuffer(m_target, m_buffer);
    glBufferData(m_target, sizeof(data), data, mode);
    glBindBuffer(m_target, 0);
}
This is because sizeof(data) is not the same as in the first case: data is a pointer, so sizeof yields the pointer size, either 4 bytes (32-bit) or 8 bytes (64-bit), as pointed out by @genpfault. The simple solution is to change your function as below.
void glBuffer::setdata(const void *data, int numElements, size_t elementSize, GLenum mode)
{
    glBindBuffer(m_target, m_buffer);
    glBufferData(m_target, numElements * elementSize, data, mode);
    glBindBuffer(m_target, 0);
}
In this function, numElements is the number of elements in the array that your void* data points to, and elementSize is the size of each element.
Here is example code using the above function:
float vertex_buffer_data[9] = {0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f};
glBuffer vbo(GL_ARRAY_BUFFER);
vbo.setdata(vertex_buffer_data, 9, sizeof(float), GL_STATIC_DRAW);
vbo.bind(0, 3);
And it should work. If you are still confused, here is a small example program to demonstrate why your code did not work.
#include "stdafx.h"
int _tmain(int argc, _TCHAR* argv[])
{
int a[5] = {1, 2, 3, 4, 5};
void* ptr = a;
printf( " sizeof(a) = %d \n", sizeof(a));
printf( " sizeof(a[0]) = %d \n", sizeof(a[0]));
printf( " sizeof(ptr) = %d \n", sizeof(ptr));
getchar();
return 0;
}
Output:
sizeof(a) = 20
sizeof(a[0]) = 4
sizeof(ptr) = 4
Note: this was compiled as a 32-bit program in Visual Studio on Windows, hence the pointer size is 4 bytes. It would be 8 if compiled as 64-bit.
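As a further hardening step (my addition, not from the original answer): if the caller passes a real array, a member template added to the glBuffer class can deduce the byte size at compile time and make the sizeof mistake impossible:
// Added inside class glBuffer -- a member template, so T and N are deduced
// from the argument's array type:
template <typename T, size_t N>
void setdata(const T (&data)[N], GLenum mode)
{
    glBindBuffer(m_target, m_buffer);
    glBufferData(m_target, N * sizeof(T), data, mode); // full byte size, known at compile time
    glBindBuffer(m_target, 0);
}
Usage is then just vbo.setdata(vertex_buffer_data, GL_STATIC_DRAW); with N = 9 and T = float deduced automatically. Because a decayed pointer cannot bind to the array-reference parameter, passing a pointer fails to compile instead of silently uploading 4 or 8 bytes.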

Related

Access violation when I use my index buffer to draw objects [duplicate]

This question already has answers here:
OpenGL object in C++ RAII class no longer works
(2 answers)
Closed 1 year ago.
I am learning OpenGL and I am trying to abstract it to make it more convenient to use, but I am getting access violations when I use my IndexBuffer class while rendering.
This is my code for IndexBuffer.h:
class IndexBuffer
{
public:
    IndexBuffer(void* data, int count);
    IndexBuffer(int count);
    IndexBuffer Bind();
    IndexBuffer UnBind();
    void AddData(void* data);
    ~IndexBuffer();
private:
    int count;
    size_t size;
    unsigned int id;
};
And this is my code for IndexBuffer.cpp:
#include "IndexBuffer.h"
#include "glew/glew.h"
IndexBuffer::IndexBuffer( void* data, int count): id(0), count(count), size(sizeof(unsigned int)* count)
{
glGenBuffers(1, &id);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * count, data, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
IndexBuffer::IndexBuffer( int count) : id(0), count(count), size(sizeof(unsigned int)* count)
{
glGenBuffers(1, &id);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * count, nullptr, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
}
IndexBuffer IndexBuffer::Bind()
{
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
return *this;
}
IndexBuffer IndexBuffer::UnBind()
{
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
return *this;
}
void IndexBuffer::AddData(void* data)
{
Bind();
glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, 0, sizeof(unsigned int) * count, data);
UnBind();
}
IndexBuffer::~IndexBuffer()
{
glDeleteBuffers(1, &id);
}
I wrote my VertexBuffer in the same way and it works fine, but my IndexBuffer just doesn't work.
This is my main.cpp:
#include <iostream>
#include "glew/glew.h"
#include "glfw/glfw3.h"
#include "VertexBuffer.h"
#include "IndexBuffer.h"

struct Vertex
{
    float aPos[2];
};

int main() {
    GLFWwindow* window;
    if (glfwInit() == GLFW_FALSE)
    {
        return -1;
    }
    //glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);
    window = glfwCreateWindow(600, 600, "Hello world", nullptr, nullptr);
    glfwMakeContextCurrent(window);
    if (glewInit() != GLEW_OK)
    {
        return -2;
    }
    unsigned int index[3] = {
        0, 1, 2
    };
    Vertex data[] = {
        Vertex({-0.5f, -0.5f}),
        Vertex({ 0.5f, -0.5f}),
        Vertex({ 0.0f,  0.5f})
    };
    unsigned int VertexArrayObject;
    glGenVertexArrays(1, &VertexArrayObject);
    VertexBuffer buffer = VertexBuffer(sizeof(Vertex) * 3);
    IndexBuffer Ibuffer = IndexBuffer(3);
    buffer.Bind();
    //glBufferData(GL_ARRAY_BUFFER, sizeof(float) * 6, data, GL_STATIC_DRAW);
    buffer.AddData(data);
    glBindVertexArray(VertexArrayObject);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(Vertex), (const void*)offsetof(Vertex, aPos));
    //glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
    buffer.UnBind();
    Ibuffer.Bind();
    Ibuffer.AddData(index);
    Ibuffer.UnBind();
    while (!glfwWindowShouldClose(window))
    {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        buffer.Bind();
        Ibuffer.Bind();
        glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_INT, nullptr);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }
    glDeleteVertexArrays(1, &VertexArrayObject);
    glfwDestroyWindow(window);
    return 0;
}
Can anybody help me out?
In short, you have undefined behaviour.
Your classes don't support deep copy. When a Bind function returns the object (i.e. itself) by value, like:
IndexBuffer IndexBuffer::Bind()
a temporary copy is made, and the destructor of IndexBuffer runs on that copy, deleting the previously allocated buffer, so the buffer's id is left dangling.
All Bind/UnBind functions should return references to their instances:
IndexBuffer& IndexBuffer::Bind()
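A minimal sketch of that fix; deleting the copy operations is an extra safety measure beyond what the answer states, so that accidental copies fail at compile time instead of destroying the GL buffer:
class IndexBuffer
{
public:
    IndexBuffer(void* data, int count);
    IndexBuffer(int count);
    IndexBuffer(const IndexBuffer&) = delete;            // copying would double-delete the GL buffer
    IndexBuffer& operator=(const IndexBuffer&) = delete;
    IndexBuffer& Bind();    // returns *this by reference: no temporary, no destructor call
    IndexBuffer& UnBind();
    void AddData(void* data);
    ~IndexBuffer();
private:
    int count;
    size_t size;
    unsigned int id;
};

IndexBuffer& IndexBuffer::Bind()
{
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, id);
    return *this;
}
With the copy constructor deleted, the line VertexBuffer buffer = VertexBuffer(...) in main.cpp would also stop compiling, flagging the same class of bug there.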

glDrawArrays does not draw anything in GL_TRIANGLES mode

My problem is that the glDrawArrays command stopped working. I've written a few programs in which I used that command, but now I have no idea why it doesn't work.
I can still draw lines to the screen with GL_LINES instead of GL_TRIANGLES.
Here is my code:
#include "Mesh.h"
#include <iostream>
Mesh::Mesh()
{
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glGenBuffers(1, &vbo);
size = 0;
}
void Mesh::AddVertices(const Vertex vertices[], int length)
{
size = length;
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
}
void Mesh::Draw()
{
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, Vertex::SIZE * 4, 0);
glDrawArrays(GL_TRIANGLES, 0, size);
glDisableVertexAttribArray(0);
}
Draw is always called by the Game class. And here is my Vertex class:
#include "Vertex.h"
Vertex::Vertex(glm::vec3 pos)
{
this->pos = pos;
}
Vertex::Vertex(float x, float y, float z)
{
pos = glm::vec3(x, y, z);
}
glm::vec3 Vertex::GetPos() const
{
return pos;
}
void Vertex::SetPos(glm::vec3 pos)
{
this->pos = pos;
}
sizeof(vertices) is sizeof(void*), because in C and C++ arrays in function arguments decay to pointers. You should use the length provided in the second argument instead; multiply it by sizeof(Vertex) if it is an element count rather than a size in bytes.
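A sketch of the corrected function, under the assumption that length is the element count used at the call site:
void Mesh::AddVertices(const Vertex vertices[], int length)
{
    size = length;
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    // length * sizeof(Vertex) is the real size in bytes;
    // sizeof(vertices) was only the size of the decayed pointer.
    glBufferData(GL_ARRAY_BUFFER, length * sizeof(Vertex), vertices, GL_STATIC_DRAW);
}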
I've found the solution. When initializing my OpenGL state, I had accidentally written glFrontFace(GL_CW) instead of glFrontFace(GL_CCW).
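For context, winding order only matters when face culling is enabled; a minimal sketch of the usual counter-clockwise setup (not taken from the poster's code):
glEnable(GL_CULL_FACE); // without this, glFrontFace has no visible effect
glCullFace(GL_BACK);    // discard back faces
glFrontFace(GL_CCW);    // counter-clockwise triangles are front-facing (the GL default)
With GL_CW set instead, counter-clockwise triangles are treated as back faces and culled, which is why GL_TRIANGLES drew nothing while GL_LINES (which has no facing) still worked.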

NVidia driver 320.86 texture buffer bug leads to crash

The following code crashes really quickly (way before the max buffer size of 2^27 texels).
I stripped every unnecessary line of code to make it easier to read.
const int MAX_LAYER_DEPTH = 5;

#include "vapp.h"
#include "vmath.h"
#include <stdio.h>

BEGIN_APP_DECLARATION(OITDemo)
    // Override functions from base class
    virtual void Initialize(const char * title);
    virtual void Display(bool auto_redraw);
    virtual void Finalize(void);
    virtual void Reshape(int width, int height);

    GLuint linked_list_buffer;
    GLuint linked_list_texture;
    GLint current_width;
    GLint current_height;
END_APP_DECLARATION()

DEFINE_APP(OITDemo, "Order Independent Transparency")

void OITDemo::Initialize(const char * title)
{
    base::Initialize(title);
    glGenBuffers(1, &linked_list_buffer);
    glGenTextures(1, &linked_list_texture);
    Reshape(100, 100);
    return;
}

void OITDemo::Display(bool auto_redraw)
{
    glClearColor(1.0f, 1.0f, 1.0f, 0.0f);
    glBindImageTexture(1, linked_list_texture, 0, GL_FALSE, 0, GL_WRITE_ONLY, GL_RGBA32UI);
    glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
    base::Display();
    return;
}

void OITDemo::Reshape(int width, int height)
{
    current_width = width;
    current_height = height;
    glBindImageTexture(1, 0, 0, GL_FALSE, 0, GL_WRITE_ONLY, GL_RGBA32UI);
    static GLuint texBufferSize = 2047;
    ++texBufferSize;
    printf("%d : texBufferSize\n", texBufferSize);
    glBindBuffer(GL_TEXTURE_BUFFER, linked_list_buffer);
    glBufferData(GL_TEXTURE_BUFFER, texBufferSize * texBufferSize * MAX_LAYER_DEPTH * sizeof(vmath::vec4), NULL, GL_DYNAMIC_DRAW);
    glBindBuffer(GL_TEXTURE_BUFFER, 0);
    // Bind it to a texture (for use as a TBO)
    glBindTexture(GL_TEXTURE_BUFFER, linked_list_texture);
    glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA32UI, linked_list_buffer);
    glBindTexture(GL_TEXTURE_BUFFER, 0);
    glViewport(0, 0, current_width, current_height);
    return;
}

void OITDemo::Finalize(void)
{
    glDeleteTextures(1, &linked_list_texture);
    glDeleteBuffers(1, &linked_list_buffer);
}
The driver most probably can't handle the fragmentation. It crashes between the reallocation of 21694445 (2083 x 2083 x 5) and 23587920 elements. The maximum buffer size (number of texels) returned by the graphics card is 2^27 (134 million texels).
It seems to work better if we allocate one big buffer at the start of the application and never change it, but it fails miserably if we try to reallocate the buffer during the life of the application.
Originally the code binds the image texture and then traces using a shader that writes to that image texture with imageStore, but I discovered that I don't need any shader to make the driver crash.
Any clue to predict/prevent the driver crash?
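Building on the allocate-once observation above, a defensive pattern is to do the worst-case allocation a single time at startup and never call glBufferData on the buffer again; a sketch (maxWidth/maxHeight stand for whatever upper bound the application can guarantee; they are not names from the original code):
void OITDemo::Initialize(const char * title)
{
    base::Initialize(title);
    glGenBuffers(1, &linked_list_buffer);
    glGenTextures(1, &linked_list_texture);

    // One worst-case allocation, done exactly once for the process lifetime.
    glBindBuffer(GL_TEXTURE_BUFFER, linked_list_buffer);
    glBufferData(GL_TEXTURE_BUFFER,
                 maxWidth * maxHeight * MAX_LAYER_DEPTH * sizeof(vmath::vec4),
                 NULL, GL_DYNAMIC_DRAW);
    glBindBuffer(GL_TEXTURE_BUFFER, 0);

    glBindTexture(GL_TEXTURE_BUFFER, linked_list_texture);
    glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA32UI, linked_list_buffer);
    glBindTexture(GL_TEXTURE_BUFFER, 0);

    Reshape(100, 100);
}
// Reshape then only updates current_width/current_height and the viewport;
// the shader indexes into the oversized buffer and no reallocation ever happens.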

OpenGL VBO Drawing

I seem to be having some trouble drawing objects in OpenGL using VBOs. I've attempted to copy the example from http://www.opengl.org/wiki/VBO_-_just_examples (number 2), but I can't get a plane to appear on screen.
Vertex.h:
#include <freeglut>
struct Vertex {
GLfloat position[3];
GLfloat normal[3];
GLfloat *uvs[2];
unsigned short uvCount;
};
Triangles.h:
#include <GL/glew.h>
#include "Vertex.h"

class Triangles {
public:
    Triangles(GLuint program, Vertex *vertices, unsigned int vertexCount, unsigned int *indices[3], unsigned int indiceCount);
    ~Triangles();
    void Draw();
private:
    GLuint program;
    GLuint VertexVBOID;
    GLuint IndexVBOID;
    GLuint VaoID;
    unsigned int *indices[3];
    unsigned int indiceCount;
};
Triangles.cpp:
#include "Triangles.h"
#include <stdio.h>
#include <stddef.h>
Triangles::Triangles(GLuint program, unsigned int *indices[3], unsigned int indiceCount) {
memcpy(this->indices, indices, sizeof(int) * indiceCount * 3);
this->indiceCount = indiceCount;
this->program = program;
glGenVertexArrays(1, &VaoID);
glBindVertexArray(VaoID);
glGenBuffers(1, &VertexVBOID);
glBindBuffer(GL_ARRAY_BUFFER, VertexVBOID);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertex) * vertexCount, vertices, GL_STATIC_DRAW);
GLuint attributeLocation = glGetAttribLocation(program, "position");
glEnableVertexAttribArray(attributeLocation);
glVertexAttribPointer(attributeLocation, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid *)(offsetof(Vertex, position)));
attributeLocation = glGetAttribLocation(program, "normal");
glEnableVertexAttribArray(attributeLocation);
glVertexAttribPointer(attributeLocation, 3, GL_FLOAT, GL_FALSE, sizeof(Vertex), (GLvoid *)(offsetof(Vertex, normal)));
glGenBuffers(1, &IndexVBOID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IndexVBOID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned int) * 3 * indiceCount, indices, GL_STATIC_DRAW);
};
Triangles::~Triangles() {
glDisableVertexAttribArray(glGetAttribLocation(program, "position"));
glDisableVertexAttribArray(glGetAttribLocation(program, "normal"));
glDeleteBuffers(1, &VertexVBOID);
glDeleteBuffers(1, &IndexVBOID);
glDeleteVertexArrays(1, &VaoID);
}
void Triangles::Draw() {
glBindVertexArray(VaoID);
glDrawElements(GL_TRIANGLES, indiceCount, GL_UNSIGNED_INT, 0);
};
Excerpt from main.cpp (creating the triangle object):
Vertex vertices[4];
vertices[0].position[0] = -1;
vertices[0].position[1] = 1;
vertices[0].position[2] = 0;
vertices[0].normal[0] = 0;
vertices[0].normal[0] = 0;
vertices[0].normal[0] = 1;
vertices[0].uvCount = 0;
vertices[1].position[0] = 1;
vertices[1].position[1] = 1;
vertices[1].position[2] = 0;
vertices[1].normal[0] = 0;
vertices[1].normal[0] = 0;
vertices[1].normal[0] = 1;
vertices[1].uvCount = 0;
vertices[2].position[0] = 1;
vertices[2].position[1] = -1;
vertices[2].position[2] = 0;
vertices[2].normal[0] = 0;
vertices[2].normal[0] = 0;
vertices[2].normal[0] = 1;
vertices[2].uvCount = 0;
vertices[3].position[0] = -1;
vertices[3].position[1] = -1;
vertices[3].position[2] = 0;
vertices[3].normal[0] = 0;
vertices[3].normal[0] = 0;
vertices[3].normal[0] = 1;
vertices[3].uvCount = 0;
unsigned int **indices;
indices = new unsigned int*[2];
indices[0] = new unsigned int[3];
indices[0][0] = 0;
indices[0][1] = 1;
indices[0][2] = 2;
indices[1] = new unsigned int[3];
indices[1][0] = 2;
indices[1][1] = 3;
indices[1][2] = 0;
Triangles *t = new Triangles(program, vertices, 4, indices, 2);
createShader(GLenum, char *):
GLuint createShader(GLenum type, char *file) {
    GLuint shader = glCreateShader(type);
    const char *fileData = textFileRead(file);
    glShaderSource(shader, 1, &fileData, NULL);
    glCompileShader(shader);
    return shader;
}
Shader loading:
GLuint v = createShader(GL_VERTEX_SHADER, "vertexShader.vert");
GLuint f = createShader(GL_FRAGMENT_SHADER, "fragmentShader.frag");
program = glCreateProgram();
glAttachShader(program, v);
glAttachShader(program, f);
glLinkProgram(program);
glUseProgram(program);
vertexShader.vert:
in vec3 position;
in vec3 normal;
out vec3 a_normal;

void main() {
    gl_Position = vec4(position, 1.0);
}
fragmentShader.frag:
in vec3 a_normal;
out vec4 out_color;

void main() {
    out_color = vec4(1.0, 1.0, 1.0, 1.0);
}
Please let me know if more code is needed. As a side note, everything compiles just fine; I just don't see the plane that I have constructed on screen (maybe because I didn't use colors?).
My OpenGL information is as follows:
Vendor: ATI Technologies Inc.
Renderer: ATI Radeon HD 5700 Series
Version: 3.2.9756 Compatibility Profile Context
Extensions: extensions = GL_AMDX_name_gen_delete GL_AMDX_random_access_target GL_AMDX_vertex_shader_tessellator GL_AMD_conservative_depth GL_AMD_draw_buffers_blend GL_AMD_performance_monitor GL_AMD_seamless_cubemap_per_texture GL_AMD_shader_stencil_export GL_AMD_texture
In response to your comments:
Unfortunately I do not do error checking
You should always add some OpenGL error checking, it will save you from so many problems. It should look something like the following:
GLenum err = glGetError();
if (err != GL_NO_ERROR) {
    //throw exception or log message or die or something
}
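Since the GL can record more than one error flag, a small helper that drains the whole error queue is often handier; a sketch (the name checkGLErrors is mine, not a GL call):
#include <stdio.h>

void checkGLErrors(const char *where)
{
    GLenum err;
    // Loop: several error flags may be pending, and glGetError
    // returns and clears only one of them per call.
    while ((err = glGetError()) != GL_NO_ERROR)
        fprintf(stderr, "GL error 0x%04X at %s\n", err, where);
}
Sprinkling checkGLErrors("after buffer setup") and similar calls around suspect code narrows down which GL call is failing.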
I used matrix functions because I didn't realize the vertex shader would affect that. I assumed the matrix was set to the matrix at the top of the stack (the one I pushed before drawing).
This is an incorrect assumption. The only variable which references the matrix stack is the special (though deprecated) variable gl_ModelViewProjectionMatrix. What you currently have there is just an unused, uninitialized matrix, which totally ignores your matrix stack.
As for indices, I'm not exactly sure what you mean. I just drew the vertices on paper and decided the indices based on that.
I'm not referring to the indices of the triangles in your index buffer, but rather to the first parameter of your glVertexAttrib* functions. I suppose 'attribute location' is a more correct term than index.
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, ... //attrib location 0
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, ... //attrib location 1
You seem to just be randomly assuming that "0" and "1" map to "position" and "normal". This is not a safe assumption to make. You should be querying the attribute location values for "position" and "normal" with glGetAttribLocation, and then using that value to glEnableVertexAttribArray and glVertexAttribPointer.
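Applied to the Triangles constructor above, that looks something like the following sketch; glGetAttribLocation returns -1 for attributes the linker optimized away, so the result is checked before use:
GLint posLoc = glGetAttribLocation(program, "position");
if (posLoc != -1) {
    glEnableVertexAttribArray(posLoc);
    glVertexAttribPointer(posLoc, 3, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), (GLvoid *)offsetof(Vertex, position));
}
GLint normLoc = glGetAttribLocation(program, "normal");
if (normLoc != -1) {
    glEnableVertexAttribArray(normLoc);
    glVertexAttribPointer(normLoc, 3, GL_FLOAT, GL_FALSE,
                          sizeof(Vertex), (GLvoid *)offsetof(Vertex, normal));
}
Note that "normal" will likely come back as -1 here, because the vertex shader above never reads it and the linker is free to discard it.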

Code Assist, OpenGL VAO/VBO Classes not drawing

Edit II:
The current code works great! Thanks everyone. I went ahead and included my shader code at the bottom for reference, though it does absolutely nothing at this point really.
I am trying to get up and going with OpenGL 4.1 and am still very early in development. Currently I'm not even really using 4.0 features in this project yet, so this is just as much an OpenGL 3 question.
The goal I worked on first was simply getting two classes together to handle VAOs and VBOs. I had some misconceptions but finally got past the blank screen.
/* THIS CODE IS NOW FULLY FUNCTIONAL */
/* well, fully is questionable lol, should work out of the box with glew and glfw */
/* A simple function that will read a file into an allocated char pointer buffer */
/* Borrowed from OpenGL.org tutorial */
char* filePull(char *file)
{
    FILE *fptr;
    long length;
    char *buf;

    fptr = fopen(file, "r");       /* Open file for reading */
    if (!fptr)                     /* Return NULL on failure */
        return NULL;
    fseek(fptr, 0, SEEK_END);      /* Seek to the end of the file */
    length = ftell(fptr);          /* Find out how many bytes into the file we are */
    buf = (char*)malloc(length+1); /* Allocate a buffer for the entire length of the file and a null terminator */
    fseek(fptr, 0, SEEK_SET);      /* Go back to the beginning of the file */
    fread(buf, length, 1, fptr);   /* Read the contents of the file in to the buffer */
    fclose(fptr);                  /* Close the file */
    buf[length] = 0;               /* Null terminator */
    return buf;                    /* Return the buffer */
}
class VBO
{
public:
    GLuint buffer;
    bool isBound;
    vector<void*> belongTo;
    vector<GLfloat> vertex;
    GLenum usage;

    void Load()
    {
        glBufferData(GL_ARRAY_BUFFER, vertex.size()*sizeof(GLfloat), &vertex[0], usage);
    }
    void Create(void* parent)
    {
        glGenBuffers(1, &buffer);
        glBindBuffer(GL_ARRAY_BUFFER, buffer);
        glBufferData(GL_ARRAY_BUFFER, vertex.size()*sizeof(GLfloat), &vertex[0], usage);
        isBound = true;
        belongTo.push_back(parent);
    }
    void Activate()
    {
        if(!isBound) glBindBuffer(GL_ARRAY_BUFFER, buffer);
        isBound = true;
    }
    void Deactivate(){ glBindBuffer(GL_ARRAY_BUFFER, 0); }

    VBO() : isBound(false), usage(GL_STATIC_DRAW) { }
    ~VBO() { }
private:
};
class VAO
{
public:
    GLuint buffer;
    string key;
    unsigned long long cursor;
    vector<VBO> child;

    void Create()
    {
        glGenVertexArrays(1, &buffer);
        for(unsigned int i=0; i<child.size(); i++)
            child[i].Create(this);
    }
    void Activate()
    {
        glBindVertexArray(buffer);
        for(unsigned int i=0; i<child.size(); i++)
            child[i].Activate();
    }
    void Release(){ glBindVertexArray(0); }
    void Remove(){ glDeleteVertexArrays(1, &buffer); }

    VAO() : buffer(1) { }
    ~VAO() { }
private:
};
int main()
{
    int width=640, height=480, frame=1; bool running = true;
    glfwInit();
    if( !glfwOpenWindow( width, height, 0, 0, 0, 0, 0, 0, GLFW_WINDOW ) )
    { glfwTerminate(); return 13; }
    glfwSetWindowTitle("Genesis");
    glewInit();
    cout<<(GLEW_VERSION_4_1?"yes":"no"); //yes

    GLchar *vsource, *fsource;
    GLuint _vs, _fs;
    GLuint Shader;
    vsource = filePull("base.vert");
    fsource = filePull("base.frag");

    /* Compile Shaders */
    _vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(_vs, 1, (const GLchar**)&vsource, 0);
    glCompileShader(_vs);
    // glGetShaderiv(_vs, GL_COMPILE_STATUS, &IsCompiled_VS);
    _fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(_fs, 1, (const GLchar**)&fsource, 0);
    glCompileShader(_fs);
    /***************** ^ Vertex | Fragment v *********************/
    Shader = glCreateProgram(); /* the program object must exist before shaders can be attached */
    glAttachShader(Shader, _vs);
    glAttachShader(Shader, _fs);
    // glGetShaderiv(_fs, GL_COMPILE_STATUS, &IsCompiled_FS);
    glBindAttribLocation(Shader, 0, "posIn");
    glLinkProgram(Shader);
    // glGetProgramiv(shaderprogram, GL_LINK_STATUS, (int *)&IsLinked);

    VAO Object3D;
    VBO myVBO[3];
    glUseProgram(Shader);
    for(int i=0; i<9; i++)
        myVBO[0].vertex.push_back((i%9)*.11); //Arbitrary vertex values
    Object3D.child.push_back(myVBO[0]);
    Object3D.Create();
    glClearColor( 0.7f, 0.74f, 0.77f, 0.0f ); //Black got lonely

    int i=0; while(running)
    {
        frame++;
        glfwGetWindowSize( &width, &height );
        height = height > 0 ? height : 1;
        glViewport( 0, 0, width, height );
        glClear( GL_COLOR_BUFFER_BIT );
        /* Bind, Draw, Unbind */
        Object3D.Activate();
        glEnableVertexAttribArray(0);
        glVertexAttribPointer(0, 3, GL_FLOAT, false, 0, 0);
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 9);
        Object3D.Release();
        glfwSwapBuffers();
        // exit if ESC was pressed or window was closed
        running = !glfwGetKey(GLFW_KEY_ESC) && glfwGetWindowParam( GLFW_OPENED );
        i++;
    }

    glUseProgram(0); glDisableVertexAttribArray(0);
    glDetachShader(Shader, _vs); glDetachShader(Shader, _fs);
    glDeleteProgram(Shader); glDeleteShader(_vs); glDeleteShader(_fs);
    glDeleteVertexArrays(1, &Object3D.buffer);
    glfwTerminate();
    return 0;
}
Basically I'm just hoping to get anything on the screen at this point. I am using GLFW and GLEW. Am I completely leaving something out, or do I just need to correct something? The code is somewhat mangled at the moment, sorry.
base.vert
// Vertex Shader – file "base.vert"
#version 330

in vec3 posIn;
out vec4 colorOut;

void main(void)
{
    gl_Position = vec4(posIn, 1.0);
    colorOut = vec4(3.0, 6.0, 4.0, 1.0);
}
base.frag
// Fragment Shader – file "base.frag"
#version 330

out vec3 colorOut;

void main(void)
{
    colorOut = vec3(1.0, 1.0, 1.0);
}
&vertex
vertex is a vector. Taking its address will not give you a pointer to the data.
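The pointer to a vector's elements is &vertex[0] (or vertex.data() in C++11), which is what the Load/Create methods above now correctly use; the contrast, shown for illustration:
glBufferData(GL_ARRAY_BUFFER, vertex.size()*sizeof(GLfloat),
             &vertex[0], usage); // OK: address of the first element
// NOT: glBufferData(..., &vertex, ...); // address of the vector object itself, wrong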
Edit to add:
Right. It still does not work, because you have at least 2 more issues:
You don't make any gl*Pointer call, so the GL won't know what it needs to pull from your vertex buffer objects.
The vertex data that you put in your vertex array is 3 times the same vertex, a triangle with the 3 points at the same location:
for(int i=0; i<9; i++)
    myVBO[0].vertex.push_back((i%3)*.2); //Arbitrary vertex values
It creates 3 (.0 .2 .4) vectors, all at the same location.
That isBound member of VBO looks suspicious. The OpenGL binding state may change, for example after switching the bound VAO, while the VBO class instance still thinks its buffer is active. Just drop isBound altogether and re-bind every time you need the object; with modern drivers, re-binding an already bound object is almost free.
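A trimmed VBO along those lines might look like this (a sketch of the suggestion, not the poster's final code):
class VBO
{
public:
    vector<GLfloat> vertex;
    GLenum usage;
    GLuint buffer;

    VBO() : usage(GL_STATIC_DRAW), buffer(0) { }

    void Create()
    {
        glGenBuffers(1, &buffer);
        Activate();
        glBufferData(GL_ARRAY_BUFFER, vertex.size()*sizeof(GLfloat), &vertex[0], usage);
    }
    // No cached flag: always re-bind; the driver makes redundant binds cheap,
    // and the class can never disagree with the real GL binding state.
    void Activate()   { glBindBuffer(GL_ARRAY_BUFFER, buffer); }
    void Deactivate() { glBindBuffer(GL_ARRAY_BUFFER, 0); }
};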