I have created a simple wxGLCanvas demo for using OpenGL with wxWidgets. The demo works fine, except that when the window is resized the memory usage jumps from a few megabytes to almost 400 megabytes, stays there, and never decreases. Here are the code snippets.
// ctor
TriangleCanvas::TriangleCanvas(wxWindow* parent, wxGLAttributes& attribList)
: wxGLCanvas(parent, attribList, wxID_ANY, { 0,0 }, wxDefaultSize),
m_vbo(0), m_vao(0), ctx_attr(new wxGLContextAttrs)
{
ctx_attr->CoreProfile().OGLVersion(4, 3).EndList();
m_context = new wxGLContext(this, NULL, ctx_attr);
Bind(wxEVT_PAINT, &TriangleCanvas::OnPaint, this);
Bind(wxEVT_SIZE, &TriangleCanvas::Resize, this);
}
// Paint method
void TriangleCanvas::OnPaint(wxPaintEvent& event)
{
wxPaintDC dc(this); // the DC must be a named object, not an unnamed temporary that is destroyed immediately
SetCurrent(*m_context);
shader->use();
// set background to black
glClearColor(0.0, 0.0, 0.0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// draw the graphics
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_ebo);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
glFlush();
SwapBuffers();
}
void TriangleCanvas::Resize(wxSizeEvent& event) {
event.Skip();
glViewport(0, 0, event.GetSize().x, event.GetSize().y);
if (!setup) {
InitializeGLEW();
SetupGraphics();
}
}
I think the best way to use wxGLCanvas with an extension loader is to use a helper class and keep all the OpenGL drawing code in the .cpp file of that helper class.
For example, here is a small helper class for drawing a triangle:
glhelper.h
#ifndef GLHELPER_H_INCLUDED
#define GLHELPER_H_INCLUDED
class GLHelper
{
public:
bool InitGlew();
void Render();
void SetSize(int w, int h);
bool InitData();
void Cleanup();
private:
unsigned int m_VBO, m_VAO, m_shaderProgram;
};
#endif // GLHELPER_H_INCLUDED
glhelper.cpp
#include <GL/glew.h>
#ifdef __WXMSW__
#include <GL/wglew.h>
#elif defined(__WXGTK__)
#include <GL/glxew.h>
#endif // defined
#include "glhelper.h"
static const char *vertexShaderSource = "#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
static const char *fragmentShaderSource = "#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(1.0f, 0.5f, 0.2f, 1.0f);\n"
"}\n\0";
bool GLHelper::InitGlew()
{
GLenum initStatus = glewInit();
return initStatus == GLEW_OK;
}
bool GLHelper::InitData()
{
unsigned int vertexShader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertexShader, 1, &vertexShaderSource, NULL);
glCompileShader(vertexShader);
// check for shader compile errors
int success;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &success);
if (!success)
{
return false;
}
// fragment shader
unsigned int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragmentShader, 1, &fragmentShaderSource, NULL);
glCompileShader(fragmentShader);
// check for shader compile errors
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &success);
if (!success)
{
return false;
}
// link shaders
m_shaderProgram = glCreateProgram();
glAttachShader(m_shaderProgram, vertexShader);
glAttachShader(m_shaderProgram, fragmentShader);
glLinkProgram(m_shaderProgram);
// check for linking errors
glGetProgramiv(m_shaderProgram, GL_LINK_STATUS, &success);
if (!success) {
return false;
}
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
float vertices[] = {
-0.5f, -0.5f, 0.0f, // left
0.5f, -0.5f, 0.0f, // right
0.0f, 0.5f, 0.0f // top
};
//unsigned int VBO, VAO;
glGenVertexArrays(1, &m_VAO);
glGenBuffers(1, &m_VBO);
// bind the Vertex Array Object first, then bind and set vertex buffer(s),
// and then configure vertex attributes(s).
glBindVertexArray(m_VAO);
glBindBuffer(GL_ARRAY_BUFFER, m_VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
// note that this is allowed, the call to glVertexAttribPointer registered
//VBO as the vertex attribute's bound vertex buffer object so afterwards we
//can safely unbind
glBindBuffer(GL_ARRAY_BUFFER, 0);
// You can unbind the VAO afterwards so other VAO calls won't accidentally
//modify this VAO, but this rarely happens. Modifying other VAOs requires a
//call to glBindVertexArray anyways so we generally don't unbind VAOs (nor
// VBOs) when it's not directly necessary.
glBindVertexArray(0);
return true;
}
void GLHelper::Cleanup()
{
glDeleteVertexArrays(1, &m_VAO);
glDeleteBuffers(1, &m_VBO);
glDeleteProgram(m_shaderProgram);
}
void GLHelper::Render()
{
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
// draw our first triangle
glUseProgram(m_shaderProgram);
glBindVertexArray(m_VAO);
// seeing as we only have a single VAO there's no need to bind it every
//time, but we'll do so to keep things a bit more organized
glDrawArrays(GL_TRIANGLES, 0, 3);
}
void GLHelper::SetSize(int width, int height)
{
glViewport(0, 0, width, height);
}
And here is a short demo that uses this helper class to draw the triangle.
demo.cpp
#include "wx/wx.h"
#include <wx/glcanvas.h>
#include "glhelper.h"
class wxGlewFrame: public wxFrame
{
public:
wxGlewFrame(wxWindow*);
~wxGlewFrame();
private:
void OnCanvasSize(wxSizeEvent&);
void OnCanvasPaint(wxPaintEvent&);
void InitGL();
wxGLCanvas* m_canvas;
wxGLContext* m_context;
GLHelper m_helper;
};
wxGlewFrame::wxGlewFrame(wxWindow* parent)
: wxFrame(parent, wxID_ANY, wxString())
{
// Create the canvas and context.
#if wxCHECK_VERSION(3,1,0)
// These settings should work with any GPU from the last 10 years.
wxGLAttributes dispAttrs;
dispAttrs.PlatformDefaults().RGBA().DoubleBuffer().EndList();
wxGLContextAttrs cxtAttrs;
cxtAttrs.PlatformDefaults().CoreProfile().OGLVersion(3, 3).EndList();
m_canvas = new wxGLCanvas(this, dispAttrs);
m_context = new wxGLContext(m_canvas, NULL, &cxtAttrs);
if ( !m_context->IsOK() )
{
SetTitle("Failed to create context.");
return;
}
#else
int dispAttrs[] = { WX_GL_RGBA, WX_GL_DOUBLEBUFFER, WX_GL_CORE_PROFILE,
WX_GL_MAJOR_VERSION, 3, WX_GL_MINOR_VERSION, 3, 0 };
m_canvas = new wxGLCanvas(this, wxID_ANY, dispAttrs);
m_context = new wxGLContext(m_canvas, NULL);
// Unfortunately, there doesn't seem to be any way to check if the
// context is ok prior to wxWidgets 3.1.0.
#endif // wxCHECK_VERSION
// On Linux, we must delay initialization until the canvas has been
// fully created. On Windows, we can finish now.
#ifdef __WXMSW__
InitGL();
#elif defined(__WXGTK__)
m_canvas->Bind(wxEVT_CREATE, [this](wxWindowCreateEvent&){InitGL();});
#endif // defined
}
wxGlewFrame::~wxGlewFrame()
{
m_helper.Cleanup();
delete m_context;
}
void wxGlewFrame::OnCanvasSize(wxSizeEvent& event)
{
wxSize sz = event.GetSize();
m_helper.SetSize(sz.GetWidth(), sz.GetHeight());
event.Skip();
}
void wxGlewFrame::OnCanvasPaint(wxPaintEvent&)
{
wxPaintDC dc(m_canvas);
m_helper.Render();
m_canvas->SwapBuffers();
}
void wxGlewFrame::InitGL()
{
// First call SetCurrent or GL initialization will fail.
m_context->SetCurrent(*m_canvas);
// Initialize GLEW.
bool glewInitialized = m_helper.InitGlew();
if ( !glewInitialized )
{
SetTitle("Failed to initialize GLEW.");
return;
}
SetTitle("Context and GLEW initialized.");
// Initialize the triangle data.
m_helper.InitData();
// Bind event handlers for the canvas. Binding was delayed until OpenGL was
// initialized because these handlers will need to call OpenGL functions.
m_canvas->Bind(wxEVT_SIZE, &wxGlewFrame::OnCanvasSize, this);
m_canvas->Bind(wxEVT_PAINT, &wxGlewFrame::OnCanvasPaint, this);
}
class MyApp : public wxApp
{
public:
virtual bool OnInit()
{
wxGlewFrame* frame = new wxGlewFrame(NULL);
frame->Show();
return true;
}
};
wxIMPLEMENT_APP(MyApp);
(This is basically the Hello Triangle sample from learnopengl.com, except rewritten to use wxGLCanvas and GLEW instead of GLFW and GLAD.)
This takes up about 19MB of memory on my system and only increases to about 24 or 25MB when resizing. That might sound like a lot for such a simple program, but running the official "Hello Triangle" sample uses 26MB, so I think the memory usage is about what should be expected.
Related
I'm working on developing code in OpenGL, and I was completing one of the tutorials for a lesson. However, the code that I completed did not color the triangle. Based on the tutorial, my triangle should come out green, but it keeps turning out white. I think there is an error in my shader code, but I can't seem to find it.
I tried altering the code a few times, and I even moved on to the next tutorial, which shades each vertex. However, my triangle is still coming out white.
#include <iostream> //Includes C++ i/o stream
#include <GL/glew.h> //Includes glew header
#include <GL/freeglut.h> //Includes freeglut header
using namespace std; //Uses the standard namespace
#define WINDOW_TITLE "Modern OpenGL" //Macro for window title
//Vertex and Fragment Shader Source Macro
#ifndef GLSL
#define GLSL(Version, Source) "#version " #Version "\n" #Source
#endif
//Variables for window width and height
int WindowWidth = 800, WindowHeight = 600;
/* User-defined Function prototypes to:
* initialize the program, set the window size,
* redraw graphics on the window when resized,
* and render graphics on the screen
* */
void UInitialize(int, char*[]);
void UInitWindow(int, char*[]);
void UResizeWindow(int, int);
void URenderGraphics(void);
void UCreateVBO(void); //This step is missing from Tutorial 3-3
void UCreateShaders(void);
/*Vertex Shader Program Source Code*/
const GLchar * VertexShader = GLSL(440,
in layout(location=0) vec4 vertex_Position; //Receive vertex coordinates from attribute 0. i.e. 2
void main(){
gl_Position = vertex_Position; //Sends vertex positions to gl_position vec 4
}
);
/*Fragment Shader Program Source Code*/
const GLchar * FragmentShader = GLSL(440,
void main(){
gl_FragColor = vec4(0.0, 1.0, 0.0, 1.0); //Sets the pixels / fragments of the triangle to green
}
);
//main function. Entry point to the OpenGL Program
int main(int argc, char* argv[])
{
UInitialize(argc, argv); //Initialize the OpenGL program
glutMainLoop(); // Starts the Open GL loop in the background
exit(EXIT_SUCCESS); //Terminates the program successfully
}
//Implements the UInitialize function
void UInitialize(int argc, char* argv[])
{
//glew status variable
GLenum GlewInitResult;
UInitWindow(argc, argv); //Creates the window
//Checks glew status
GlewInitResult = glewInit();
if(GLEW_OK != GlewInitResult)
{
fprintf(stderr, "Error: %s\n", glewGetErrorString(GlewInitResult));
exit(EXIT_FAILURE);
}
//Displays GPU OpenGL version
fprintf(stdout, "INFO: OpenGL Version: %s\n", glGetString(GL_VERSION));
UCreateVBO(); //Calls the function to create the Vertex Buffer Object
UCreateShaders(); //Calls the function to create the Shader Program
//Sets the background color of the window to black. Optional
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}
//Implements the UInitWindow function
void UInitWindow(int argc, char* argv[])
{
//Initializes freeglut
glutInit(&argc, argv);
//Sets the window size
glutInitWindowSize(WindowWidth, WindowHeight);
//Memory buffer setup for display
glutInitDisplayMode(GLUT_DEPTH | GLUT_DOUBLE | GLUT_RGBA);
//Creates a window with the macro placeholder title
glutCreateWindow(WINDOW_TITLE);
glutReshapeFunc(UResizeWindow); //Called when the window is resized
glutDisplayFunc(URenderGraphics); //Renders graphics on the screen
}
//Implements the UResizeWindow function
void UResizeWindow(int Width, int Height)
{
glViewport(0,0, Width, Height);
}
//Implements the URenderGraphics function
void URenderGraphics(void)
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); //Clears the screen
/*Creates the triangle*/
GLuint totalVertices = 3; //Specifies the number of vertices for the triangle i.e. 3
glDrawArrays(GL_TRIANGLES, 0, totalVertices); //Draws the triangle
glutSwapBuffers(); //Flips the back buffer with the front buffer every frame. Similar to GL Flush
}
//Implements the CreateVBO function
void UCreateVBO(void)
{
//Specifies coordinates for triangle vertices on x and y
GLfloat verts[] =
{
0.0f, 1.0f, //top-center of the screen
-1.0f, -1.0f, //bottom-left of the screen
1.0f, -1.0f //bottom-right of the screen
};
//Stores the size of the verts array / number of the coordinates needed for the triangle i.e. 6
float numVertices = sizeof(verts);
GLuint myBufferID; //Variable for vertex buffer object id
glGenBuffers(1, &myBufferID); //Creates 1 buffer
glBindBuffer(GL_ARRAY_BUFFER, myBufferID); //Activates the buffer
glBufferData(GL_ARRAY_BUFFER, numVertices, verts, GL_STATIC_DRAW); //Sends vertex or coordinate data to GPU
/*Creates the Vertex Attribute Pointer*/
GLuint floatsPerVertex = 2; //Number of coordinates per vertex
glEnableVertexAttribArray(0); //Specifies the initial position of the coordinates in the buffer
/*Instructs the GPU on how to handle the vertex buffer object data.
* Parameters: attribPointerPosition | coordinates per vertex | data type | deactivate normalization | 0 strides | 0 offset
*/
glVertexAttribPointer(0, floatsPerVertex, GL_FLOAT, GL_FALSE, 0, 0);
}
//Implements the UCreateShaders function
void UCreateShaders(void)
{
//Create a shader program object
GLuint ProgramId = glCreateProgram();
GLuint vertexShaderId = glCreateShader(GL_VERTEX_SHADER); //Create a Vertex Shader Object
GLuint fragmentShaderId = glCreateShader(GL_FRAGMENT_SHADER); //Create a Fragment Shader Object
glShaderSource(vertexShaderId, 1, &VertexShader, NULL); //Retrieves the vertex shader source code
glShaderSource(fragmentShaderId, 1, &FragmentShader, NULL); //Retrieves the fragment shader source code
glCompileShader(vertexShaderId); //Compile the vertex shader
glCompileShader(fragmentShaderId); //Compile the fragment shader
//Attaches the vertex and fragment shaders to the shader program
glAttachShader(ProgramId, vertexShaderId);
glAttachShader(ProgramId, fragmentShaderId);
glLinkProgram(ProgramId); //Links the shader program
glUseProgram(ProgramId); //Uses the shader program
}
When completed correctly, the code should result in a solid green triangle.
The variable gl_FragColor is unavailable in the GLSL 4.4 core profile: it was deprecated in GLSL 1.30 and removed from the core profile. Because you don't specify a profile, core is assumed by default. Either use
#version 440 compatibility
for your shaders, or, even better, use the modern core-profile approach of declaring your own output variable:
#version 440 core
layout(location = 0) out vec4 OUT;
void main(){
OUT = vec4(0.0, 1.0, 0.0, 1.0);
}
I searched Stack Overflow for similar questions looking for a solution, but none of them seem to solve my problem.
main.cpp :
#include"reader.h"
#include"window.h"
#include"shader.h"
int main() {
float vertices[] = {
-0.5f, -0.5f, 0.0f ,
0.5f, -0.5f, 0.0f ,
0.0f, 0.5f, 0.0f
};
Window window;
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
Shader shader;
shader.addShader("./src/shaders/basic.vtx",GL_VERTEX_SHADER);
shader.addShader("./src/shaders/basic.frg", GL_FRAGMENT_SHADER);
shader.compile();
shader.enable();
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices,GL_STATIC_DRAW);
GLint pos_in = glGetAttribLocation(shader.getProgram(), "pos_in");
if (pos_in < 0) {
std::cout << "pos_in not found\n";
}
glVertexAttribPointer(pos_in, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void *)0);
glEnableVertexAttribArray(pos_in);
while (!window.closed()) {
window.update();
glDrawArrays(GL_TRIANGLES,0,3);
}
return 0;
}
shader.h :
#pragma once
#include<glad/glad.h>
#include<iostream>
#include<vector>
#include"reader.h"
class Shader {
std::vector<GLuint*> shaders;
GLuint program;
public :
GLuint& getProgram() {
return program;
}
Shader() {
program = glCreateProgram();
}
void addShader(const char * path, GLenum type) {
std::string data = ShaderReader(path).read_shader();
const char * chardata = data.c_str();
GLuint shader = glCreateShader(type);
glShaderSource(shader, 1, &chardata , nullptr);
glCompileShader(shader);
int success;
char buffer[512];
glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
if (!success) {
glGetShaderInfoLog(shader, 512, NULL, buffer);
std::cout << buffer << std::endl;
return;
}
std::cout << "shader inserted into vector\n";
shaders.push_back(&shader);
}
void compile(){
for (int i = 0; i != shaders.size();i++) {
glAttachShader(program, *shaders[i]);
}
glLinkProgram(program);
glValidateProgram(program);
glUseProgram(program);
int status;
glGetProgramiv(program, GL_COMPILE_STATUS, &status);
char buffer[512];
if (!status) {
glGetProgramInfoLog(program,512,NULL,buffer);
std::cout << buffer << std::endl;
return;
}
std::cout << "shader compilation successful\n";
}
void enable() {
glUseProgram(program);
}
void disable() {
glUseProgram(0);
}
~Shader() {
for (int i = 0; i != shaders.size();i++) {
glDeleteShader(*shaders[i]);
}
}
};
vertex shader written in basic.vtx:
#version 400
layout (location = 0 ) in vec3 pos_in ;
void main(){
gl_Position = vec4(pos_in.x , pos_in.y , pos_in.z , 1.0f);
}
fragment shader written in basic.frg :
#version 400
out vec4 color;
void main(){
color = vec4(0.0f, 0.5f , 0.5f , 1.0f);
}
At the time glGetAttribLocation is called, the vertex shader IS USING the pos_in attribute to set gl_Position, yet it returns -1.
Also, the triangle is not rendered when using the result of glGetAttribLocation(), or is rendered white when I pass hard-coded attribute pointer values like 0 or 1, along with an OpenGL 1281 (GL_INVALID_VALUE) error.
One thing that is definitely not valid is the shaders.push_back(&shader):
void addShader(const char * path, GLenum type) {
// ....
GLuint shader = glCreateShader(type);
// ....
shaders.push_back(&shader);
}
With shaders.push_back(&shader) you push the address of a local variable into the shaders vector. Once addShader returns, that address is dangling, so glAttachShader(program, *shaders[i]); results in undefined behavior.
shader holds only a numeric id, so there is no need to store a pointer to it. Just change std::vector<GLuint*> shaders to std::vector<GLuint> shaders, use shaders.push_back(shader), and replace every *shaders[i] with shaders[i].
The reason you don't get a linking error is most likely that the content at the address you get from &shader happens not to be overwritten before glAttachShader(program, *shaders[i]) runs, and that both entries in the shaders vector hold the same stack address. As a result, glAttachShader is called each time with the same id, so only the fragment shader is attached to the program.
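For illustration, here is a minimal sketch of the corrected members, assuming the rest of the Shader class stays as posted (note that the posted compile() also asks the program object for GL_COMPILE_STATUS; link results are queried with GL_LINK_STATUS):
class Shader {
    std::vector<GLuint> shaders; // store the ids by value, not pointers to locals
    GLuint program;
public:
    void addShader(const char * path, GLenum type) {
        std::string data = ShaderReader(path).read_shader();
        const char * chardata = data.c_str();
        GLuint shader = glCreateShader(type);
        glShaderSource(shader, 1, &chardata, nullptr);
        glCompileShader(shader);
        int success;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &success);
        if (!success) {
            char buffer[512];
            glGetShaderInfoLog(shader, 512, NULL, buffer);
            std::cout << buffer << std::endl;
            return;
        }
        shaders.push_back(shader); // copies the id itself
    }
    void compile() {
        for (std::size_t i = 0; i != shaders.size(); i++) {
            glAttachShader(program, shaders[i]); // no dereference needed
        }
        glLinkProgram(program);
        int status;
        glGetProgramiv(program, GL_LINK_STATUS, &status); // GL_LINK_STATUS, not GL_COMPILE_STATUS
        if (!status) {
            char buffer[512];
            glGetProgramInfoLog(program, 512, NULL, buffer);
            std::cout << buffer << std::endl;
        }
    }
    // enable(), disable() and the destructor stay the same, with every
    // *shaders[i] replaced by shaders[i].
};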
I am writing an OpenGL program (C++) which draws a ground plane with two 3D objects above it. The tool I use is Xcode version 8.0 (8A218a) on OS X 10.11.6.
my code(main.cpp):
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>
#include <fstream>
using namespace std;
using glm::vec3;
using glm::mat4;
GLint programID;
//initialize all OpenGL objects
GLuint groundVAO, groundVBO, groundEBO; //ground
bool checkStatus( //OK
GLuint objectID,
PFNGLGETSHADERIVPROC objectPropertyGetterFunc,
PFNGLGETSHADERINFOLOGPROC getInfoLogFunc,
GLenum statusType)
{
GLint status;
objectPropertyGetterFunc(objectID, statusType, &status);
if (status != GL_TRUE)
{
GLint infoLogLength;
objectPropertyGetterFunc(objectID, GL_INFO_LOG_LENGTH, &infoLogLength);
GLchar* buffer = new GLchar[infoLogLength];
GLsizei bufferSize;
getInfoLogFunc(objectID, infoLogLength, &bufferSize, buffer);
cout << buffer << endl;
delete[] buffer;
return false;
}
return true;
}
bool checkShaderStatus(GLuint shaderID) //OK
{
return checkStatus(shaderID, glGetShaderiv, glGetShaderInfoLog, GL_COMPILE_STATUS);
}
bool checkProgramStatus(GLuint programID) //OK
{
return checkStatus(programID, glGetProgramiv, glGetProgramInfoLog, GL_LINK_STATUS);
}
string readShaderCode(const char* fileName) //OK
{
ifstream meInput(fileName);
if (!meInput.good())
{
cout << "File failed to load..." << fileName;
exit(1);
}
return std::string(
std::istreambuf_iterator<char>(meInput),
std::istreambuf_iterator<char>()
);
}
void installShaders() //OK
{
GLuint vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
const GLchar* adapter[1];
//adapter[0] = vertexShaderCode;
string temp = readShaderCode("VertexShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(vertexShaderID, 1, adapter, 0);
//adapter[0] = fragmentShaderCode;
temp = readShaderCode("FragmentShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(fragmentShaderID, 1, adapter, 0);
glCompileShader(vertexShaderID);
glCompileShader(fragmentShaderID);
if (!checkShaderStatus(vertexShaderID) ||
!checkShaderStatus(fragmentShaderID))
return;
programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
if (!checkProgramStatus(programID))
return;
glDeleteShader(vertexShaderID);
glDeleteShader(fragmentShaderID);
glUseProgram(programID);
}
void keyboard(unsigned char key, int x, int y)
{
//TODO:
}
void sendDataToOpenGL()
{
//TODO:
//create solid objects here and bind to VAO & VBO
//Ground, vertices info
const GLfloat Ground[]
{
-5.0f, +0.0f, -5.0f, //0
+0.498f, +0.898, +0.0f, //grass color
+5.0f, +0.0f, -5.0f, //1
+0.498f, +0.898, +0.0f,
+5.0f, +0.0f, +5.0f, //2
+0.498f, +0.898, +0.0f,
-5.0f, +0.0f, +5.0f
};
GLushort groundIndex[] = {1,2,3, 1,0,3};
//Pass ground to vertexShader
//VAO
glGenVertexArrays(1, &groundVAO);
glBindVertexArray(groundVAO);
//VBO
glGenBuffers(1, &groundVBO);
glBindBuffer(GL_ARRAY_BUFFER, groundVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Ground), Ground, GL_STATIC_DRAW);
//EBO
glGenBuffers(1, &groundEBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, groundEBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(groundIndex), groundIndex, GL_STATIC_DRAW);
//connectToVertexShader
glEnableVertexAttribArray(0); //position
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, 0);
glEnableVertexAttribArray(1); //color
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, (char*)(sizeof(float)*3));
}
void paintGL(void)
{
//TODO:
//render your objects and control the transformation here
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//translate model
glm::mat4 modelTransformMatrix = glm::translate(glm::mat4(), vec3(+0.0f, +0.0f, -3.0f));
//perspective view
glm::mat4 projectionMatrix = glm::perspective(+40.0f, +1.0f, +1.0f, +60.0f);
//ultimate matrix
glm::mat4 ultimateMatrix;
//register location on the graphics cards
GLint ultimateMatrixUniformLocation = glGetUniformLocation(programID, "ultimateMatrix");
/*GLint modelTransformMatrixUniformLocation = glGetUniformLocation(programID, "modelTransformMatrix");
GLint projectionMatrixUniformLocation = glGetUniformLocation(programID, "projectionMatrix");*/
//drawing the ground
/*glUniformMatrix4fv(modelTransformMatrixUniformLocation, 1, GL_FALSE, &modelTransformMatrix[0][0]);
glUniformMatrix4fv(projectionMatrixUniformLocation, 1, GL_FALSE, &projectionMatrix[0][0]);*/
glBindVertexArray(groundVAO);
ultimateMatrix = projectionMatrix * modelTransformMatrix;
glUniformMatrix4fv(ultimateMatrixUniformLocation, 1, GL_FALSE, &ultimateMatrix[0][0]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
glFlush();
glutPostRedisplay();
}
void initializedGL(void) //run only once
{
glewInit();
glEnable(GL_DEPTH_TEST);
sendDataToOpenGL();
installShaders();
}
int main(int argc, char *argv[])
{
/*Initialization*/
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutCreateWindow("Try");
glutInitWindowSize(700, 700);
//const GLubyte* glversion = glGetString(GL_VERSION);
/*Register different CALLBACK function for GLUT to response
with different events, e.g. window sizing, mouse click or
keyboard stroke */
initializedGL();
//glewExperimental = GL_TRUE;
glutDisplayFunc(paintGL);
glutKeyboardFunc(keyboard);
/*Enter the GLUT event processing loop which never returns.
it will call different registered CALLBACK according
to different events. */
//printf("OpenGL ver: %s\n", glversion);
glutMainLoop();
return 0;
}
VertexShaderCode.glsl:
#version 430 // GLSL version your computer supports
in layout(location=0) vec3 position;
in layout(location=1) vec3 vertexColor;
uniform mat4 ultimateMatrix;
out vec3 theColor;
void main()
{
vec4 v = vec4(position, 1.0);
gl_Position = ultimateMatrix * v;
theColor = vertexColor;
}
FragmentShaderCode.glsl:
#version 430 //GLSL version your computer supports
out vec4 theColor2;
in vec3 theColor;
void main()
{
theColor2 = vec4(theColor, 1.0);
}
Functions: checkStatus, checkShaderStatus, checkProgramStatus, readShaderCode, and installShaders should all be fine.
void keyboard() can be ignored since I haven't implemented it yet (it's just for keyboard control).
I implemented the object "Ground" in sendDataToOpenGL(). But when I compiled and ran the program, "thread 1: exc_bad_access (code=1, address=0x0)" occurred at the glGenVertexArrays line.
And the pop-up window is just a white screen instead of green grass (3D).
I have tried a method provided in another Stack Overflow post: using glewExperimental = GL_TRUE;. I didn't see any errors with that, but the pop-up window vanished immediately after it appeared. It seems that it couldn't fix the problem.
Can someone help me? Thank you!
glGenVertexArrays has been available since OpenGL version 3.0. Whether vertex array objects are supported can be checked with glewGetExtension("GL_ARB_vertex_array_object").
GLEW can expose additional extensions via glewExperimental = GL_TRUE;. See the GLEW documentation, which says:
GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.
Add this to your code:
glewExperimental = GL_TRUE;
glewInit();
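In the posted program, those two lines belong at the top of initializedGL, before sendDataToOpenGL creates any buffers; a minimal sketch based on the initializedGL shown above:
void initializedGL(void) //run only once
{
    glewExperimental = GL_TRUE; //must be set before glewInit()
    glewInit();
    glEnable(GL_DEPTH_TEST);
    sendDataToOpenGL();
    installShaders();
}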
I'm trying to set up a game engine project. My Visual Studio solution is set up so that I have an 'engine' project separate from my 'game' project. The engine project is compiled to a DLL for the game project to use. I've already downloaded and set up GLFW and GLEW to start using OpenGL. My problem is that whenever I hit my first OpenGL function, the program crashes. I know this has something to do with glewInit, even though GLEW IS initializing successfully (no console errors). In my engine project, I have a window class where, upon window construction, GLEW should be set up:
Window.h
#pragma once
#include "GL\glew.h"
#include "GLFW\glfw3.h"
#if (_DEBUG)
#define LOG(x) printf(x)
#else
#define LOG(x)
#endif
namespace BlazeGraphics
{
class __declspec(dllexport) Window
{
public:
Window(short width, short height, const char* title);
~Window();
void Update();
void Clear() const;
bool Closed() const;
private:
int m_height;
int m_width;
const char* m_title;
GLFWwindow* m_window;
private:
Window(const Window& copy) {}
void operator=(const Window& copy) {}
};
}
Window.cpp (where glewinit() is called)
#include "Window.h"
#include <cstdio>
namespace BlazeGraphics
{
//Needed to define outside of the window class (not sure exactly why yet)
void WindowResize(GLFWwindow* window, int width, int height);
Window::Window(short width, short height, const char* title) :
m_width(width),
m_height(height),
m_title(title)
{
//InitializeWindow
{
if (!glfwInit())
{
LOG("Failed to initialize glfw!");
return;
};
m_window = glfwCreateWindow(m_width, m_height, m_title, NULL, NULL);
if (!m_window)
{
LOG("Failed to initialize glfw window!");
glfwTerminate();
return;
};
glfwMakeContextCurrent(m_window);
glfwSetWindowSizeCallback(m_window, WindowResize);
}
//IntializeGl
{
//This needs to be after two functions above (makecontextcurrent and setwindowresizecallback) or else glew will not initialize
if (glewInit() != GLEW_OK)
{
LOG("Failed to initialize glew!");
}
}
}
Window::~Window()
{
glfwTerminate();
}
void Window::Update()
{
glfwPollEvents();
glfwSwapBuffers(m_window);
}
void Window::Clear() const
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
//Returns a bool because glfwWindowShouldClose returns a nonzero number or zero
bool Window::Closed() const
{
//Made it equal to 1 to take away warning involving converting an int to bool
return glfwWindowShouldClose(m_window) == 1;
}
//Not part of window class so defined above
void WindowResize(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
}
Here is my main.cpp file, found within my game project, where I currently have my OpenGL functionality in global functions (just for now):
main.cpp
#include <iostream>
#include <array>
#include <fstream>
#include "GL\glew.h"
#include "GLFW\glfw3.h"
#include "../Engine/Source/Graphics/Window.h"
void initializeGLBuffers()
{
GLfloat triangle[] =
{
+0.0f, +0.1f, -0.0f,
0.0f, 1.0f, 0.0f,
-0.1f, -0.1f, 0.0f, //1
0.0f, 1.0f, 0.0f,
+0.1f, -0.1f, 0.0f, //2
0.0f, 1.0f, 0.0f,
};
GLuint bufferID;
glGenBuffers(1, &bufferID);
glBindBuffer(GL_ARRAY_BUFFER, bufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(triangle), triangle, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, (sizeof(GLfloat)) * 6, nullptr);
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, (sizeof(GLfloat)) * 6, (char*)((sizeof(GLfloat)) * 3));
GLushort indices[] =
{
0,1,2
};
GLuint indexBufferID;
glGenBuffers(1, &indexBufferID);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);
};
void installShaders()
{
//Create Shader
GLuint vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
GLuint FragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
//Add source or text file to shader object
std::string temp = readShaderCode("VertexShaderCode.glsl");
const GLchar* adapter[1];
adapter[0] = temp.c_str();
glShaderSource(vertexShaderID, 1, adapter, 0);
temp = readShaderCode("FragmentShaderCode.glsl").c_str();
adapter[0] = temp.c_str();
glShaderSource(FragmentShaderID, 1, adapter, 0);
//Compile Shaders
glCompileShader(vertexShaderID);
glCompileShader(FragmentShaderID);
if (!checkShaderStatus(vertexShaderID) || !checkShaderStatus(FragmentShaderID))
return;
//Create Program
GLuint programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, FragmentShaderID);
//Link Program
glLinkProgram(programID);
if (!checkProgramStatus(programID))
{
std::cout << "Failed to link program";
return;
}
//Use program
glUseProgram(programID);
}
int main()
{
BlazeGraphics::Window window(1280, 720, "MyGame");
initializeGLBuffers();
installShaders();
while (!window.Closed())
{
window.Clear();
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0);
window.Update();
};
return 0;
}
Now, if I move the glewInit() call into my main.cpp like this:
int main()
{
BlazeGraphics::Window window(1280, 720, "MyGame");
if (glewInit() != GLEW_OK)
{
LOG("Failed to initialize glew!");
}
initializeGLBuffers();
installShaders();
while (!window.Closed())
{
window.Clear();
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_SHORT, 0);
window.Update();
};
return 0;
}
then my program runs fine. Why does trying to initialize GLEW within engine.dll cause a program crash? Thanks for any help.
GLEW works by defining a global function pointer variable for each OpenGL function. Let's look at glBindBuffer as an example:
#define GLEW_FUN_EXPORT GLEWAPI
typedef void (GLAPIENTRY * PFNGLBINDBUFFERPROC) (GLenum target, GLuint buffer);
GLEW_FUN_EXPORT PFNGLBINDBUFFERPROC __glewBindBuffer;
So we just have a __glewBindBuffer function pointer, which will be set to the correct address from your OpenGL implementation by glewInit.
To actually be able to write glBindBuffer, GLEW simply defines pre-processor macros mapping the GL functions to those function pointer variables:
#define glBindBuffer GLEW_GET_FUN(__glewBindBuffer)
Why does trying to initialize glew within engine.dll cause a program crash?
Because your engine.dll and your main application each have a separate set of all of these global variables. You would have to export all the __glew* variables from your engine DLL to be able to get access to the results of your glewInit call in engine.dll.
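One pragmatic workaround, sketched here under the assumption that GLEW stays statically linked into both modules, is to initialize GLEW once per module that actually calls GL functions, after the context has been made current (InitGraphics is a hypothetical exported helper, not part of the posted code):
// Engine DLL: an exported helper that fills in the DLL's own __glew* pointers.
namespace BlazeGraphics
{
    __declspec(dllexport) bool InitGraphics()
    {
        // Runs inside engine.dll, so it initializes the DLL's copies.
        return glewInit() == GLEW_OK;
    }
}
// Game EXE: the EXE has its own independent set of __glew* pointers, so it
// needs its own glewInit() call too (which is why moving the call into
// main.cpp made the GL calls issued from the EXE stop crashing).
int main()
{
    BlazeGraphics::Window window(1280, 720, "MyGame"); // makes the context current
    if (glewInit() != GLEW_OK)          // EXE-side pointers
        return 1;
    if (!BlazeGraphics::InitGraphics()) // DLL-side pointers
        return 1;
    // ... rest of main as posted
}
Alternatively, build GLEW itself as a shared library used by both projects; then there is only one set of function pointers and a single glewInit() call suffices.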
So I started using OpenGL with GLEW and GLFW to create a game engine, and I almost immediately ran into a problem when I started working with shaders:
They are not being used, or have no effect whatsoever if they are being used.
I have been checking my code against plenty of other examples, and they all match up; nothing looks out of place, and I am starting to run out of ideas and patience (I have been trying to figure this out for nearly a month now).
My main core code is here:
#include "headers/Default.hpp"
//Window width and height variables
int windowWidth = 800;
int windowHeight = 600;
float Aspect = (float)windowWidth / (float)windowHeight;
//Buffer width and buffer height
int bufferWidth;
int bufferHeight;
double deltaTime;
double currentTime;
double newTime;
void CalculateDelta()
{
newTime = glfwGetTime();
deltaTime = newTime - currentTime;
currentTime = newTime;
}
//A call back function to get the window size
void UpdateWindowSize(GLFWwindow* window, int width, int height)
{
windowWidth = width;
windowHeight = height;
Aspect = (float)windowWidth / (float)windowHeight;
}
void UpdateFrameBufferSize(GLFWwindow* window, int width, int height)
{
bufferWidth = width;
bufferHeight = height;
}
//Starts on startup and creates an window context and starts the rendering loop
int main()
{
//Creates an engine startup log to keep
CreateStartupLog();
if (!glewInit())
{
WriteStartupLog("ERROR: GLEW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLEW initiated!\n");
}
//If glfw is not initiated for whatever reason we return an error
if (!glfwInit())
{
WriteStartupLog("ERROR: GLFW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLFW initiated!\n");
}
////////////////////////////////////////////////////////////////
// Window Section //
////////////////////////////////////////////////////////////////
//glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
//glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
//Gets the primary monitor of the PC and tells OpenGL to use that monitor
GLFWmonitor* monitor = glfwGetPrimaryMonitor();
const GLFWvidmode* videoMode = glfwGetVideoMode(monitor);
//Creates a GLFW window context that we can work with
GLFWwindow* gameWindow = glfwCreateWindow(windowWidth/*videoMode->width*/, windowHeight/*videoMode->height*/, "FireTech Engine", NULL/*monitor*/, NULL);
//If the game window is not able to be created, prints an error and terminates the program
if (!gameWindow)
{
WriteStartupLog("ERROR: GLFW could not create a window\n");
glfwTerminate();
return 1;
}
else
{
WriteStartupLog("INFO: GLFW created a window!\n\n");
}
//Makes the current context
glfwMakeContextCurrent(gameWindow);
//Sets the window callback function for size
glfwSetWindowSizeCallback(gameWindow, UpdateWindowSize);
glfwSetFramebufferSizeCallback(gameWindow, UpdateFrameBufferSize);
//Initiate GLEW
glewExperimental = GL_TRUE;
glewInit();
////////////////////////////////////////////////////////////////
// Functions to set up various systems of the game engine //
////////////////////////////////////////////////////////////////
//Calls function to create a log file for the game engine
CreateEngineLog();
//Calls the function to compile the default shaders
CompileDefaultShader();
//Calls the function to get and print out hardware and OpenGL version
//PrintHardwareInfo();
////////////////////////////////////////////////////////////////
// Game Code //
////////////////////////////////////////////////////////////////
Sprite testSprite;
//Rendering loop
while (!glfwWindowShouldClose(gameWindow))
{
CalculateDelta();
glClearColor(0.3, 0.6, 1.0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//Viewport and ortho settings
glViewport(0, 0, windowWidth, windowHeight);
glOrtho(-1, 1, -1 / Aspect, 1 / Aspect, 0, 1);
//Draw a sprite
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F2))
{
testSprite.DebugDraw();
}
else
{
testSprite.Draw();
}
//Draws the stuff we just rendered
glfwSwapBuffers(gameWindow);
glLoadIdentity();
//Polls different events, like input for example
glfwPollEvents();
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F1))
{
int fps = GetFPS();
printf("FPS: ");
printf("%d\n", fps);
printf("Frequency: ");
printf("%f\n", 1/double(fps));
}
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_ESCAPE))
{
glfwSetWindowShouldClose(gameWindow, 1);
}
}
glfwTerminate();
WriteEngineLog("PROGRAM EXITED: Window closed");
return 0;
}
Here is the shader.cpp code:
#include "../headers/Default.hpp"
string ReadShaderFile(char* path)
{
ifstream shaderFile;
shaderFile.open(path, std::ifstream::in);
string output;
if (shaderFile.is_open())
{
printf("Opened shader file located at: \"%s\"\n", path);
while (!shaderFile.eof())
{
output += shaderFile.get();
}
printf("Successfully read shader file located at: \"%s\"\n", path);
}
else
{
WriteEngineLog("ERROR: Could not read shader file!\n");
}
shaderFile.close();
return output;
}
Shader::Shader()
{
WriteEngineLog("WARNING: There was no path to any GLSL Shader files\n");
}
Shader::Shader(char* VertexShaderPathIn, char* FragmentShaderPathIn)
{
string vertexShaderString = ReadShaderFile(VertexShaderPathIn);
string fragmentShaderString = ReadShaderFile(FragmentShaderPathIn);
//Prints out the string to show the shader's code
printf("\n%s\n", vertexShaderString.c_str());
printf("\n%s\n", fragmentShaderString.c_str());
//Creates the GLchars needed to input the shader code
const GLchar* vertex_shader = vertexShaderString.c_str();
const GLchar* fragment_shader = fragmentShaderString.c_str();
//Creates a vertex shader and compiles it
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
WriteEngineLog("Blank vertex shader created\n");
glShaderSource(vertexShader, 1, &vertex_shader, NULL);
WriteEngineLog("Vertex shader given source\n");
glCompileShader(vertexShader);
//Compilation error checking begins here
GLint isVertexCompiled = 0;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isVertexCompiled);
if (isVertexCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);
//Writes to the engine log with the shader error
WriteEngineLog("ERROR: Vertex shader failed to compile!\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Vertex shader successfully compiled!\n");
//Creates a fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
WriteEngineLog("Blank fragment shader created\n");
glShaderSource(fragmentShader, 1, &fragment_shader, NULL);
WriteEngineLog("Fragment shader given source\n");
glCompileShader(fragmentShader);
//Compilation error checking begins here
GLint isFragmentCompiled = 0;
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isFragmentCompiled);
if (isFragmentCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(fragmentShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(fragmentShader, maxLength, &maxLength, &errorLog[0]);
WriteEngineLog("ERROR: Fragment shader failed to compile\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Fragment shader successfully compiled!\n");
//Creates the final shader product
this->Program = glCreateProgram();
WriteEngineLog("Blank shader created\n");
glAttachShader(this->Program, vertexShader);
WriteEngineLog("Attatched Vertex shader to the shader\n");
glAttachShader(this->Program, fragmentShader);
WriteEngineLog("Attatched Fragment shader to the shader\n");
glLinkProgram(this->Program);
/*GLint isLinked = 0;
glGetProgramiv(this->Program, GL_LINK_STATUS, (int*)&isLinked);
if (isLinked == GL_FALSE)
{
//Gets the lngth of the shader info log
GLint maxLength = 0;
glGetProgramInfolog(ShaderOut, GL_INFO_LOG_LENGTH, &maxLength);
//Gets and puts the actual log into a GLchar
std::vector<GLchar> infoLog(maxLength);
glGetProgramInfoLog(ShaderOut, maxLength, &maxLength, &infoLog[0]);
//Deletes programs and shaders so they don't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog((string)infoLog);
return;
}*/
WriteEngineLog("Shader linked!\n\n");
WriteEngineLog("INFO: Shader created!\n");
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
}
void Shader::Use()
{
glUseProgram(this->Program);
}
Here is the quad.cpp code:
#include "../headers/Default.hpp"
Quad::Quad()
{
position.x = 0;
position.y = 0;
scale.x = 1;
scale.y = 1;
VertexArray = CreateVertexArray();
}
//Quad constructor with one arg
Quad::Quad(Vector2 Position)
{
position = Position;
VertexArray = CreateVertexArray();
}
//Quad constructor with two args
Quad::Quad(Vector2 Position, Vector2 Scale)
{
position = Position;
scale = Scale;
VertexArray = CreateVertexArray();
}
GLuint Quad::CreateVertexArray()
{
GLfloat Vertices[] =
{
//VERTICES //COLORS //TEXCOORDS
0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 0.0f, //1.0f, 1.0f, //Top Right Vertice
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, //1.0f, 0.0f, //Top Left Vertice
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f//, 0.0f, 0.0f //Bottom Left Vertice
};
GLuint vbo, vao;
glGenVertexArrays(1, &vao);
glGenBuffers(1, &vbo);
glBindVertexArray(vao);
//Copy vertices into the buffer
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
//Attribute Pointers
//Position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
//Color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
//Unbinds the VAO
glBindVertexArray(0);
return vao;
}
//Quad debug drawing function
void Quad::DebugDraw()
{
//Use the default shader
DefaultShader.Use();
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBindVertexArray(VertexArray);
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); //CAUSING A CRASH AT THE MOMENT
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
//Unbinds the VAO
glBindVertexArray(0);
}
Here is the sprite.cpp code:
#include "../headers/Default.hpp"
Sprite::Sprite()
{
position.x = 0;
position.y = 0;
}
Sprite::Sprite(Texture tex)
{
defaultTexture = tex;
currentTexture = tex;
}
Sprite::Sprite(Texture tex, Vector2 pos)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
}
Sprite::Sprite(Texture tex, Vector2 pos, Vector2 Scale)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
scale = Scale;
}
void Sprite::Draw()
{
//Binds the default shader again
glBindVertexArray(VertexArray);
//Use the default shader
DefaultShader.Use();
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
}
Here is my vertex shader and fragment shader code (In order):
//Vertex Shader
#version 330 core
layout (location = 0) in vec3 position; // The position variable has attribute position 0
layout (location = 1) in vec3 color;
out vec3 ourColor;
void main()
{
gl_Position = vec4(position, 1.0f); // See how we directly give a vec3 to vec4's constructor
ourColor = color;
}
//Fragment shader
#version 330 core
in vec3 ourColor;
out vec4 color;
void main()
{
color = ourColor;
}
And I'm getting a warning that my shader did not compile... the error is that there is a non-ASCII character at line ZERO of the vertex shader.
I had exactly the same error. This is almost certainly due to Unicode Byte Order Marks, or similar non-printing characters inserted by text editors.
These are common at the start of a Unicode file, but can occur anywhere.
You can programmatically strip these from your shader source strings before compiling, but this could be costly if you are compiling many shaders. Look up the BOM byte sequences for each Unicode encoding if you go this route.
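For example, here is a minimal sketch that strips a UTF-8 BOM (bytes 0xEF 0xBB 0xBF) from a freshly loaded source string before it reaches glShaderSource (StripUtf8Bom is a name introduced here, not part of the posted engine):
#include <string>

// Removes a leading UTF-8 byte order mark, if present. Other encodings
// (e.g. UTF-16) use different BOM bytes and would need their own checks.
std::string StripUtf8Bom(std::string source)
{
    if (source.compare(0, 3, "\xEF\xBB\xBF") == 0)
        source.erase(0, 3);
    return source;
}

// Usage in the Shader constructor shown above:
// string vertexShaderString = StripUtf8Bom(ReadShaderFile(VertexShaderPathIn));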
An alternative is simply to keep the files in ANSI/ASCII format. I am sure most text editors have the facility to set/convert formats, but I will give Notepad++ as an example since it's what I use to edit GLSL:
1. Open the GLSL file.
2. Encoding -> Convert to ANSI. (Note that merely hitting "Encode in ANSI" will not strip the characters.)
3. Save the file.
The above should also strip other characters prone to confusing GLSL parsers (and C/C++ compilers in general).
In debug builds, you could also warn the user (or developer) on load that the files are in an incorrect format.