OpenGL ES2 black screen (C++, OSX, SDL2)

Hate to make a "find my bug" post, but the internet seems to be severely lacking any examples for ultra-straightforward OpenGL ES2 triangle-drawing.
And technically, this is using OpenGL 2.1, but OpenGL ES2 is just a subset of that... right?
Anyways, here's my code (I stripped logging/error checking where I know it passes)
#include <SDL.h>
#include <SDL_opengl.h>
int main(int argc, char* argv[])
{
SDL_Init( SDL_INIT_VIDEO | SDL_INIT_EVENTS);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_COMPATIBILITY);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION,2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION,1);
SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL,1);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER,1);
SDL_Window* window = SDL_CreateWindow("Test", 0,0,100,100, SDL_WINDOW_OPENGL);
SDL_GLContext gl = SDL_GL_CreateContext(window);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
GLuint gl_program_id;
GLuint gl_vs_id;
GLuint gl_fs_id;
GLuint gl_pos_attrib_id;
GLuint gl_pos_buff_id;
char vs_file[2048]; //assume I successfully loaded vertex shader code into this array
char *vs_file_p = &vs_file[0];
char fs_file[2048]; //assume I successfully loaded fragment shader code into this array
char *fs_file_p = &fs_file[0];
gl_vs_id = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(gl_vs_id, 1, &vs_file_p, NULL);
glCompileShader(gl_vs_id);
gl_fs_id = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(gl_fs_id, 1, &fs_file_p, NULL);
glCompileShader(gl_fs_id);
gl_program_id = glCreateProgram();
glAttachShader(gl_program_id, gl_vs_id);
glAttachShader(gl_program_id, gl_fs_id);
glLinkProgram(gl_program_id);
glDeleteShader(gl_vs_id);
glDeleteShader(gl_fs_id);
glUseProgram(gl_program_id);
gl_pos_attrib_id = glGetAttribLocation(gl_program_id, "position");
float verts[] = {0.0,0.0,0.5,1.0,1.0,0.0};
glGenBuffers(1, &gl_pos_buff_id);
glBindBuffer(GL_ARRAY_BUFFER, gl_pos_buff_id);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*2*3, verts, GL_STATIC_DRAW);
glVertexAttribPointer(gl_pos_attrib_id, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(gl_pos_attrib_id);
glViewport(0,0,100,100);
glClearColor(0.,0.,0.,1.);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(gl_program_id);
glBindBuffer(GL_ARRAY_BUFFER, gl_pos_buff_id);
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*2*3, verts, GL_STATIC_DRAW);
glVertexAttribPointer(gl_pos_attrib_id, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
glEnableVertexAttribArray(gl_pos_attrib_id);
glDrawArrays(GL_TRIANGLES,0,3*3);
SDL_GL_SwapWindow(window);
//while loop waiting on input to quit goes here...
SDL_GL_DeleteContext(gl);
SDL_Quit();
exit(0);
return 0;
}
And my shaders are as follows:
Vertex:
#version 110
attribute vec2 position;
void main()
{
gl_Position = vec4(position,1.,1.);
}
Fragment:
#version 110
void main()
{
gl_FragColor = vec4(1.);
}
Just another note that all the shaders compile, the program compiles, the context and window creation all work without a hitch. It just ends up with a black 100x100px window. Any help would be greatly appreciated!
(also, if anyone can recommend decent, consistent, straightforward documentation sources on opengl es2 that would be awesome. So much inconsistency with docs between versions/platforms/whatever. thanks!)

Related

OpenGL Won't Render With Shaders Mac [closed]

I'm trying to get OpenGL to render a basic triangle with shaders on my newer Macbook Pro with the M1 chip. I'm stuck using Qt Creator as well.
I was able to set it up and get a basic Fixed-Pipeline scene rendering, but when I changed the version to use modern OpenGL I get that the version is 4.1, even though I set it to 3.2.
Here's how I set the version:
QSurfaceFormat format;
format.setDepthBufferSize(24);
format.setStencilBufferSize(8);
format.setProfile(QSurfaceFormat::CoreProfile);
format.setMajorVersion(3);
format.setMinorVersion(2);
QSurfaceFormat::setDefaultFormat(format);
this->setFormat(format); // must be called before the widget or its parent window gets shown
When I print out the shader sources I can see that I'm getting exactly what I put into my vertex shader and fragment shader files, so I know that I'm loading in the files correctly. Here's how I'm using them inside of my Shader class:
programID = glCreateProgram(); // Creates the program ID
vs = glCreateShader(GL_VERTEX_SHADER); // Creates the vertex shader
fs = glCreateShader(GL_FRAGMENT_SHADER); // Creates the fragment shader
// Shader reading code removed...
const char* vSrc = vSource.c_str();
int size = vSource.size();
glShaderSource(vs, 1, &vSrc, &size);
glCompileShader(vs);
int result;
glGetShaderiv(vs, GL_COMPILE_STATUS, &result);
if (result == GL_FALSE)
{
int length;
glGetShaderiv(vs, GL_INFO_LOG_LENGTH, &length);
char* msg = new char[length];
glGetShaderInfoLog(vs, length, &length, msg);
std::cout << "Vertex Shader Message:\n" << msg << std::endl;
exit(1);
}
const char* fSrc = fSource.c_str();
int size2 = fSource.size();
glShaderSource(fs, 1, &fSrc, &size2);
glCompileShader(fs);
int result2;
glGetShaderiv(fs, GL_COMPILE_STATUS, &result2);
if (result2 == GL_FALSE)
{
int length;
glGetShaderiv(fs, GL_INFO_LOG_LENGTH, &length);
char* msg = new char[length];
glGetShaderInfoLog(fs, length, &length, msg);
std::cout << "Fragment Shader Message:\n" << msg << std::endl;
exit(1);
}
glAttachShader(programID, vs);
glAttachShader(programID, fs);
glLinkProgram(programID);
glValidateProgram(programID);
And then in my initOpenGL function I have:
float positions[6] =
{
-0.5, -0.5,
0.0, 0.5,
0.5, -0.5
};
glGenBuffers(1, &currentBuffer); // Generates the buffer
glBindBuffer(GL_ARRAY_BUFFER, currentBuffer); // Binds it before we use it
glBufferData(GL_ARRAY_BUFFER, 6 * sizeof(float), positions, GL_STATIC_DRAW); // Sends the data to OpenGL
glEnableVertexAttribArray(0); // Enables the buffer layout
glBindBuffer(GL_ARRAY_BUFFER, currentBuffer);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0); // Tells OpenGL how we stored the data
s = new Shader("main.vsh", "main.fsh");
glUseProgram(s->getShaderID());
Then, I render the data with the following call in my render function:
glDrawArrays(GL_TRIANGLES, 0, 3);
Here is the vertex shader code:
#version 410 core
layout (location = 0) in vec4 position;
void main()
{
gl_Position = position;
}
Here is the fragment shader code:
#version 410 core
layout (location = 0) out vec4 color;
void main(void)
{
color = vec4(1.0, 0.0, 0.0, 1.0);
}
This is not a problem with the shaders. However, in a core profile OpenGL context you have to create a Vertex Array Object, since the default VAO (0) is not valid. This is not optional:
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(0); // Enables the buffer layout
glBindBuffer(GL_ARRAY_BUFFER, currentBuffer);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
You have to bind the VAO before the vertex specification. The vertex specification is stored in the VAO.
The vertex specification stored in the currently bound VAO is used when drawing a mesh. Hence, you need to make sure the VAO is bound before the drawing command as well:
glBindVertexArray(vao);
glDrawArrays(GL_TRIANGLES, 0, 3);
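Putting the answer's two snippets together, a minimal sketch of the intended order, reusing the question's currentBuffer and attribute location 0 (the vao name is just for illustration):
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);                       // bind the VAO first; it records the vertex specification
glBindBuffer(GL_ARRAY_BUFFER, currentBuffer); // this binding is captured by glVertexAttribPointer
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(float) * 2, 0);
glEnableVertexAttribArray(0);
// ... later, in the render function:
glBindVertexArray(vao);                       // the draw call uses whatever VAO is currently bound
glDrawArrays(GL_TRIANGLES, 0, 3);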

OpenGL -- glGenVertexArrays, "thread 1: exc_bad_access (code =1, address=0x0)"

I am writing an OpenGL program (C++) which draws a ground with two 3D objects above it. The programming tool I use is Xcode version 8.0 (8A218a) on OS X 10.11.6.
my code(main.cpp):
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>
#include <fstream>
using namespace std;
using glm::vec3;
using glm::mat4;
GLint programID;
//initialize all OpenGL objects
GLuint groundVAO, groundVBO, groundEBO; //ground
bool checkStatus( //OK
GLuint objectID,
PFNGLGETSHADERIVPROC objectPropertyGetterFunc,
PFNGLGETSHADERINFOLOGPROC getInfoLogFunc,
GLenum statusType)
{
GLint status;
objectPropertyGetterFunc(objectID, statusType, &status);
if (status != GL_TRUE)
{
GLint infoLogLength;
objectPropertyGetterFunc(objectID, GL_INFO_LOG_LENGTH, &infoLogLength);
GLchar* buffer = new GLchar[infoLogLength];
GLsizei bufferSize;
getInfoLogFunc(objectID, infoLogLength, &bufferSize, buffer);
cout << buffer << endl;
delete[] buffer;
return false;
}
return true;
}
bool checkShaderStatus(GLuint shaderID) //OK
{
return checkStatus(shaderID, glGetShaderiv, glGetShaderInfoLog, GL_COMPILE_STATUS);
}
bool checkProgramStatus(GLuint programID) //OK
{
return checkStatus(programID, glGetProgramiv, glGetProgramInfoLog, GL_LINK_STATUS);
}
string readShaderCode(const char* fileName) //OK
{
ifstream meInput(fileName);
if (!meInput.good())
{
cout << "File failed to load..." << fileName;
exit(1);
}
return std::string(
std::istreambuf_iterator<char>(meInput),
std::istreambuf_iterator<char>()
);
}
void installShaders() //OK
{
GLuint vertexShaderID = glCreateShader(GL_VERTEX_SHADER);
GLuint fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER);
const GLchar* adapter[1];
//adapter[0] = vertexShaderCode;
string temp = readShaderCode("VertexShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(vertexShaderID, 1, adapter, 0);
//adapter[0] = fragmentShaderCode;
temp = readShaderCode("FragmentShaderCode.glsl");
adapter[0] = temp.c_str();
glShaderSource(fragmentShaderID, 1, adapter, 0);
glCompileShader(vertexShaderID);
glCompileShader(fragmentShaderID);
if (!checkShaderStatus(vertexShaderID) ||
!checkShaderStatus(fragmentShaderID))
return;
programID = glCreateProgram();
glAttachShader(programID, vertexShaderID);
glAttachShader(programID, fragmentShaderID);
glLinkProgram(programID);
if (!checkProgramStatus(programID))
return;
glDeleteShader(vertexShaderID);
glDeleteShader(fragmentShaderID);
glUseProgram(programID);
}
void keyboard(unsigned char key, int x, int y)
{
//TODO:
}
void sendDataToOpenGL()
{
//TODO:
//create solid objects here and bind to VAO & VBO
//Ground, vertices info
const GLfloat Ground[]
{
-5.0f, +0.0f, -5.0f, //0
+0.498f, +0.898, +0.0f, //grass color
+5.0f, +0.0f, -5.0f, //1
+0.498f, +0.898, +0.0f,
+5.0f, +0.0f, +5.0f, //2
+0.498f, +0.898, +0.0f,
-5.0f, +0.0f, +5.0f
};
GLushort groundIndex[] = {1,2,3, 1,0,3};
//Pass ground to vertexShader
//VAO
glGenVertexArrays(1, &groundVAO);
glBindVertexArray(groundVAO);
//VBO
glGenBuffers(1, &groundVBO);
glBindBuffer(GL_ARRAY_BUFFER, groundVBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(Ground), Ground, GL_STATIC_DRAW);
//EBO
glGenBuffers(1, &groundEBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, groundEBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(groundIndex), groundIndex, GL_STATIC_DRAW);
//connectToVertexShader
glEnableVertexAttribArray(0); //position
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, 0);
glEnableVertexAttribArray(1); //color
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(float) * 6, (char*)(sizeof(float)*3));
}
void paintGL(void)
{
//TODO:
//render your objects and control the transformation here
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//translate model
glm::mat4 modelTransformMatrix = glm::translate(glm::mat4(), vec3(+0.0f, +0.0f, -3.0f));
//perspective view
glm::mat4 projectionMatrix = glm::perspective(+40.0f, +1.0f, +1.0f, +60.0f);
//ultimate matrix
glm::mat4 ultimateMatrix;
//register location on the graphics cards
GLint ultimateMatrixUniformLocation = glGetUniformLocation(programID, "ultimateMatrix");
/*GLint modelTransformMatrixUniformLocation = glGetUniformLocation(programID, "modelTransformMatrix");
GLint projectionMatrixUniformLocation = glGetUniformLocation(programID, "projectionMatrix");*/
//drawing the ground
/*glUniformMatrix4fv(modelTransformMatrixUniformLocation, 1, GL_FALSE, &modelTransformMatrix[0][0]);
glUniformMatrix4fv(projectionMatrixUniformLocation, 1, GL_FALSE, &projectionMatrix[0][0]);*/
glBindVertexArray(groundVAO);
ultimateMatrix = projectionMatrix * modelTransformMatrix;
glUniformMatrix4fv(ultimateMatrixUniformLocation, 1, GL_FALSE, &ultimateMatrix[0][0]);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);
glFlush();
glutPostRedisplay();
}
void initializedGL(void) //run only once
{
glewInit();
glEnable(GL_DEPTH_TEST);
sendDataToOpenGL();
installShaders();
}
int main(int argc, char *argv[])
{
/*Initialization*/
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
glutCreateWindow("Try");
glutInitWindowSize(700, 700);
//const GLubyte* glversion = glGetString(GL_VERSION);
/*Register different CALLBACK function for GLUT to response
with different events, e.g. window sizing, mouse click or
keyboard stroke */
initializedGL();
//glewExperimental = GL_TRUE;
glutDisplayFunc(paintGL);
glutKeyboardFunc(keyboard);
/*Enter the GLUT event processing loop which never returns.
it will call different registered CALLBACK according
to different events. */
//printf("OpenGL ver: %s\n", glversion);
glutMainLoop();
return 0;
}
VertexShaderCode.glsl:
#version 430 // GLSL version your computer supports
in layout(location=0) vec3 position;
in layout(location=1) vec3 vertexColor;
uniform mat4 ultimateMatrix;
out vec3 theColor;
void main()
{
vec4 v = vec4(position, 1.0);
gl_Position = ultimateMatrix * v;
theColor = vertexColor;
}
FragmentShaderCode.glsl:
#version 430 //GLSL version your computer supports
out vec4 theColor2;
in vec3 theColor;
void main()
{
theColor2 = vec4(theColor, 1.0);
}
Functions: checkStatus, checkShaderStatus, checkProgramStatus, readShaderCode, installShaders should be all fine.
void keyboard() can be ignored since I haven't implemented it yet (it's just for keyboard control).
I implemented the object "Ground" in sendDataToOpenGL(). But when I compiled and ran the program, "thread 1: exc_bad_access (code=1, address=0x0)" occurred at the VAO line (glGenVertexArrays):
And the pop-up window is just a white screen instead of the green grass (3D).
I have tried a method provided in another Stack Overflow post: using glewExperimental = GL_TRUE;. I didn't see any errors with it, but the pop-up window vanished immediately after it appeared, so it didn't seem to help.
Can someone help me out? Thank you!
glGenVertexArrays has been available since OpenGL version 3.0. Whether vertex array objects are supported can be checked with glewGetExtension("GL_ARB_vertex_array_object").
GLEW can enable additional extensions via glewExperimental = GL_TRUE;. See the GLEW documentation, which says:
GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.
Add this to your code:
glewExperimental = GL_TRUE;
glewInit();
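Putting it together, a rough sketch of how initializedGL could look with the extension check mentioned above (assuming the rest of the question's code stays unchanged):
void initializedGL(void)
{
    glewExperimental = GL_TRUE;
    glewInit();
    if (!glewGetExtension("GL_ARB_vertex_array_object"))
    {
        cout << "vertex array objects are not supported" << endl;
        exit(1);
    }
    glEnable(GL_DEPTH_TEST);
    sendDataToOpenGL();   // safe to call glGenVertexArrays from here on
    installShaders();
}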

OpenGL Red Book with Mac OS X

I would like to work through the OpenGL Red Book, The OpenGL Programming Guide, 8th edition, using Xcode on Mac OS X.
I am unable to run the first code example, triangles.cpp. I have tried including the GLUT and GL frameworks that come with Xcode and I have searched around enough to see that I am not likely to figure this out on my own.
Assuming that I have a fresh installation of Mac OS X, and I have freshly installed Xcode with Xcode command-line tools, what are the step-by-step instructions to be able to run triangles.cpp in that environment?
Unlike this question, my preference would be not to use Cocoa, Objective-C or Swift. My preference would be to stay in C++/C only. An answer is only correct if I can follow it step-by-step and end up with a running triangles.cpp program.
My preference is Mac OS X 10.9, however a correct answer can assume 10.9, 10.10 or 10.11.
Thank you.
///////////////////////////////////////////////////////////////////////
//
// triangles.cpp
//
///////////////////////////////////////////////////////////////////////
#include <iostream>
using namespace std;
#include "vgl.h"
#include "LoadShader.h"
enum VAO_IDs { Triangles, NumVAOs };
enum Buffer_IDs { ArrayBuffer, NumBuffers };
enum Attrib_IDs { vPosition = 0 };
GLuint VAOs[NumVAOs];
GLuint Buffers[NumBuffers];
const GLuint NumVertices = 6;
//---------------------------------------------------------------------
//
// init
//
void
init(void)
{
glGenVertexArrays(NumVAOs, VAOs);
glBindVertexArray(VAOs[Triangles]);
GLfloat vertices[NumVertices][2] = {
{ -0.90, -0.90 }, // Triangle 1
{ 0.85, -0.90 },
{ -0.90, 0.85 },
{ 0.90, -0.85 }, // Triangle 2
{ 0.90, 0.90 },
{ -0.85, 0.90 }
};
glGenBuffers(NumBuffers, Buffers);
glBindBuffer(GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices),
vertices, GL_STATIC_DRAW);
ShaderInfo shaders[] = {
{ GL_VERTEX_SHADER, "triangles.vert" },
{ GL_FRAGMENT_SHADER, "triangles.frag" },
{ GL_NONE, NULL }
};
GLuint program = LoadShaders(*shaders);
glUseProgram(program);
glVertexAttribPointer(vPosition, 2, GL_FLOAT,
GL_FALSE, 0, BUFFER_OFFSET(0));
glEnableVertexAttribArray(vPosition);
}
//---------------------------------------------------------------------
//
// display
//
void
display(void)
{
glClear(GL_COLOR_BUFFER_BIT);
glBindVertexArray(VAOs[Triangles]);
glDrawArrays(GL_TRIANGLES, 0, NumVertices);
glFlush();
}
//---------------------------------------------------------------------
//
// main
//
int
main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA);
glutInitWindowSize(512, 512);
glutInitContextVersion(4, 3);
glutInitContextProfile(GLUT_CORE_PROFILE);
glutCreateWindow(argv[0]);
glewExperimental = GL_TRUE;
if (glewInit()) {
cerr << "Unable to initialize GLEW ... exiting" << endl;
exit(EXIT_FAILURE);
}
init();
glutDisplayFunc(display);
glutMainLoop();
}
Edit 1: In response to the first comment, here is the naive effort.
Open Xcode 5.1.1 on Mac OS X 10.9.5
Create a new C++ Command-line project.
Paste over the contents of main.cpp with the contents of triangles.cpp.
Click on the project -> Build Phases -> Link Binary with Libraries
Add OpenGL.framework and GLUT.framework
Result: "/Users/xxx/Desktop/Triangles/Triangles/main.cpp:10:10: 'vgl.h' file not found"
Edit 2: Added the vgl translation unit and the LoadShaders translation unit, and added libFreeGlut.a and libGlew32.a to my project's compilation/linking. Moved all of the OpenGL Book's Include contents to my project's source directory. Had to change several include statements to use quoted includes instead of angled includes. It feels like this is closer to working, but it is unable to find LoadShader.h. Note that the translation unit in the OpenGL download is called LoadShaders (plural). Changing triangles.cpp to reference LoadShaders.h fixed the include problem, but the contents of that translation unit don't seem to match the signatures of what's being called from triangles.cpp.
There are some issues with the source and with the files in oglpg-8th-edition.zip:
triangles.cpp uses non-standard GLUT functions that aren't included in OS X's GLUT and are instead only part of the freeglut implementation (glutInitContextVersion and glutInitContextProfile). freeglut doesn't really support OS X, and building it relies on additional X11 support. Instead of telling you how to do this I'm just going to modify the source to build with OS X's GLUT framework.
The code depends on glew, and the book's source download apparently doesn't include a binary you can use, so you'll need to build it for yourself.
Build GLEW with the following commands:
git clone git://git.code.sf.net/p/glew/code glew
cd glew
make extensions
make
Now:
Create a C++ command line Xcode project
Set the executable to link with the OpenGL and GLUT frameworks and the glew dylib you just built.
Modify the project "Header Search Paths" to include the location of the glew headers for the library you built, followed by the path to oglpg-8th-edition/include
Add oglpg-8th-edition/lib/LoadShaders.cpp to your xcode project
Paste the triangles.cpp source into the main.cpp of your Xcode project
Modify the source: replace #include "vgl.h" with:
#include <GL/glew.h>
#include <OpenGL/gl3.h>
#include <GLUT/glut.h>
#define BUFFER_OFFSET(x) ((const void*) (x))
Also make sure that the typos in the version of triangle.cpp that you include in your question are fixed: You include "LoadShader.h" when it should be "LoadShaders.h", and LoadShaders(*shaders); should be LoadShaders(shaders). (The code printed in my copy of the book doesn't contain these errors.)
Delete the calls to glutInitContextVersion and glutInitContextProfile.
Change the parameter to glutInitDisplayMode to GLUT_RGBA | GLUT_3_2_CORE_PROFILE
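With those two changes, the start of main() would look roughly like this (a sketch based on the question's code, not the book's exact listing):
int main(int argc, char** argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGBA | GLUT_3_2_CORE_PROFILE); // replaces the two deleted context calls
    glutInitWindowSize(512, 512);
    glutCreateWindow(argv[0]);
    glewExperimental = GL_TRUE;
    if (glewInit()) {
        cerr << "Unable to initialize GLEW ... exiting" << endl;
        exit(EXIT_FAILURE);
    }
    init();
    glutDisplayFunc(display);
    glutMainLoop();
}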
At this point the code builds, links, and runs, however running the program displays a black window for me instead of the expected triangles.
About fixing the black window issue mentioned in Matthew's and bames53's comments:
Follow bames53's answer
Define the shaders as strings
const char *pTriangleVert =
"#version 410 core\n\
layout(location = 0) in vec4 vPosition;\n\
void\n\
main()\n\
{\n\
gl_Position= vPosition;\n\
}";
const char *pTriangleFrag =
"#version 410 core\n\
out vec4 fColor;\n\
void\n\
main()\n\
{\n\
fColor = vec4(0.0, 0.0, 1.0, 1.0);\n\
}";
OpenGL 4.1 is supported on my iMac, so I changed the version to 410
ShaderInfo shaders[] = {
{ GL_VERTEX_SHADER, pTriangleVert},
{ GL_FRAGMENT_SHADER, pTriangleFrag },
{ GL_NONE, NULL }
};
Modify the ShaderInfo struct slightly
change
typedef struct {
GLenum type;
const char* filename;
GLuint shader;
} ShaderInfo;
into
typedef struct {
GLenum type;
const char* source;
GLuint shader;
} ShaderInfo;
Modify the loadShader function slightly
Comment out the code that reads the shader from a file
/*
const GLchar* source = ReadShader( entry->filename );
if ( source == NULL ) {
for ( entry = shaders; entry->type != GL_NONE; ++entry ) {
glDeleteShader( entry->shader );
entry->shader = 0;
}
return 0;
}
glShaderSource( shader, 1, &source, NULL );
delete [] source;*/
into
glShaderSource(shader, 1, &entry->source, NULL);
You'd better turn on DEBUG in case there are shader compile errors.
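If DEBUG isn't enabled, a generic compile-status check along these lines can be dropped in right after glCompileShader (a sketch; shader stands for whatever shader object the loader just compiled):
GLint compiled;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
    GLsizei len;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &len);
    GLchar* log = new GLchar[len + 1];
    glGetShaderInfoLog(shader, len, &len, log);
    std::cerr << "Shader compilation failed: " << log << std::endl;
    delete[] log;
}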
You can use the example from this link. It's almost the same; it uses GLFW instead of GLUT.
http://www.tomdalling.com/blog/modern-opengl/01-getting-started-in-xcode-and-visual-cpp/
/*
main
Copyright 2012 Thomas Dalling - http://tomdalling.com/
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//#include "platform.hpp"
// third-party libraries
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <glm/glm.hpp>
// standard C++ libraries
#include <cassert>
#include <iostream>
#include <stdexcept>
#include <cmath>
// tdogl classes
#include "Program.h"
// constants
const glm::vec2 SCREEN_SIZE(800, 600);
// globals
GLFWwindow* gWindow = NULL;
tdogl::Program* gProgram = NULL;
GLuint gVAO = 0;
GLuint gVBO = 0;
// loads the vertex shader and fragment shader, and links them to make the global gProgram
static void LoadShaders() {
std::vector<tdogl::Shader> shaders;
shaders.push_back(tdogl::Shader::shaderFromFile("vertex-shader.txt", GL_VERTEX_SHADER));
shaders.push_back(tdogl::Shader::shaderFromFile("fragment-shader.txt", GL_FRAGMENT_SHADER));
gProgram = new tdogl::Program(shaders);
}
// loads a triangle into the VAO global
static void LoadTriangle() {
// make and bind the VAO
glGenVertexArrays(1, &gVAO);
glBindVertexArray(gVAO);
// make and bind the VBO
glGenBuffers(1, &gVBO);
glBindBuffer(GL_ARRAY_BUFFER, gVBO);
// Put the three triangle vertices into the VBO
GLfloat vertexData[] = {
// X Y Z
0.0f, 0.8f, 0.0f,
-0.8f,-0.8f, 0.0f,
0.8f,-0.8f, 0.0f,
};
glBufferData(GL_ARRAY_BUFFER, sizeof(vertexData), vertexData, GL_STATIC_DRAW);
// connect the xyz to the "vert" attribute of the vertex shader
glEnableVertexAttribArray(gProgram->attrib("vert"));
glVertexAttribPointer(gProgram->attrib("vert"), 3, GL_FLOAT, GL_FALSE, 0, NULL);
// unbind the VBO and VAO
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
}
// draws a single frame
static void Render() {
// clear everything
glClearColor(0, 0, 0, 1); // black
glClear(GL_COLOR_BUFFER_BIT);
// bind the program (the shaders)
glUseProgram(gProgram->object());
// bind the VAO (the triangle)
glBindVertexArray(gVAO);
// draw the VAO
glDrawArrays(GL_TRIANGLES, 0, 3);
// unbind the VAO
glBindVertexArray(0);
// unbind the program
glUseProgram(0);
// swap the display buffers (displays what was just drawn)
glfwSwapBuffers(gWindow);
}
void OnError(int errorCode, const char* msg) {
throw std::runtime_error(msg);
}
// the program starts here
void AppMain() {
// initialise GLFW
glfwSetErrorCallback(OnError);
if(!glfwInit())
throw std::runtime_error("glfwInit failed");
// open a window with GLFW
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
gWindow = glfwCreateWindow((int)SCREEN_SIZE.x, (int)SCREEN_SIZE.y, "OpenGL Tutorial", NULL, NULL);
if(!gWindow)
throw std::runtime_error("glfwCreateWindow failed. Can your hardware handle OpenGL 3.2?");
// GLFW settings
glfwMakeContextCurrent(gWindow);
// initialise GLEW
glewExperimental = GL_TRUE; //stops glew crashing on OSX :-/
if(glewInit() != GLEW_OK)
throw std::runtime_error("glewInit failed");
// print out some info about the graphics drivers
std::cout << "OpenGL version: " << glGetString(GL_VERSION) << std::endl;
std::cout << "GLSL version: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;
std::cout << "Vendor: " << glGetString(GL_VENDOR) << std::endl;
std::cout << "Renderer: " << glGetString(GL_RENDERER) << std::endl;
// make sure OpenGL version 3.2 API is available
if(!GLEW_VERSION_3_2)
throw std::runtime_error("OpenGL 3.2 API is not available.");
// load vertex and fragment shaders into opengl
LoadShaders();
// create buffer and fill it with the points of the triangle
LoadTriangle();
// run while the window is open
while(!glfwWindowShouldClose(gWindow)){
// process pending events
glfwPollEvents();
// draw one frame
Render();
}
// clean up and exit
glfwTerminate();
}
int main(int argc, char *argv[]) {
try {
AppMain();
} catch (const std::exception& e){
std::cerr << "ERROR: " << e.what() << std::endl;
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
}
I have adapted the project for Mac here:
https://github.com/badousuan/openGLredBook9th
The project builds successfully and most demos run as expected. However, the original code is based on OpenGL 4.5, while Mac only supports version 4.1, so some newer API calls may fail. If a target doesn't work well, you should consider this version issue and make some adaptations.
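For example, the 4.5-style direct-state-access buffer calls have to be rewritten with their pre-4.5 equivalents; a hypothetical illustration (vbo and verts are made-up names, not from the project):
GLuint vbo;
float verts[] = { 0.0f, 0.5f, 0.5f, -0.5f, -0.5f, -0.5f };
// OpenGL 4.5 direct state access (not available in a 4.1 context):
//   glCreateBuffers(1, &vbo);
//   glNamedBufferData(vbo, sizeof(verts), verts, GL_STATIC_DRAW);
// OpenGL 4.1 equivalent:
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);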
I use the code from this tutorial: http://antongerdelan.net/opengl/hellotriangle.html, and it works on my mac.
Here is the code I run.
#include <GL/glew.h> // include GLEW and new version of GL on Windows
#include <GLFW/glfw3.h> // GLFW helper library
#include <stdio.h>
int main() {
// start GL context and O/S window using the GLFW helper library
if (!glfwInit()) {
fprintf(stderr, "ERROR: could not start GLFW3\n");
return 1;
}
// uncomment these lines if on Apple OS X
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(640, 480, "Hello Triangle", NULL, NULL);
if (!window) {
fprintf(stderr, "ERROR: could not open window with GLFW3\n");
glfwTerminate();
return 1;
}
glfwMakeContextCurrent(window);
// start GLEW extension handler
glewExperimental = GL_TRUE;
glewInit();
// get version info
const GLubyte* renderer = glGetString(GL_RENDERER); // get renderer string
const GLubyte* version = glGetString(GL_VERSION); // version as a string
printf("Renderer: %s\n", renderer);
printf("OpenGL version supported %s\n", version);
// tell GL to only draw onto a pixel if the shape is closer to the viewer
glEnable(GL_DEPTH_TEST); // enable depth-testing
glDepthFunc(GL_LESS); // depth-testing interprets a smaller value as "closer"
/* OTHER STUFF GOES HERE NEXT */
float points[] = {
0.0f, 0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
-0.5f, -0.5f, 0.0f
};
GLuint vbo = 0; // vertex buffer object
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, 9 * sizeof(float), points, GL_STATIC_DRAW);
GLuint vao = 0; // vertex array object
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, NULL);
const char* vertex_shader =
"#version 400\n"
"in vec3 vp;"
"void main() {"
" gl_Position = vec4(vp, 1.0);"
"}";
const char* fragment_shader =
"#version 400\n"
"out vec4 frag_colour;"
"void main() {"
" frag_colour = vec4(0.5, 0.0, 0.5, 1.0);"
"}";
GLuint vs = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vs, 1, &vertex_shader, NULL);
glCompileShader(vs);
GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fs, 1, &fragment_shader, NULL);
glCompileShader(fs);
GLuint shader_programme = glCreateProgram();
glAttachShader(shader_programme, fs);
glAttachShader(shader_programme, vs);
glLinkProgram(shader_programme);
while(!glfwWindowShouldClose(window)) {
// wipe the drawing surface clear
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glUseProgram(shader_programme);
glBindVertexArray(vao);
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
// update other events like input handling
glfwPollEvents();
// put the stuff we've been drawing onto the display
glfwSwapBuffers(window);
}
// close GL context and any other GLFW resources
glfwTerminate();
return 0;
}

OpenGL Draw Points with indices

I was trying to draw a single point at 0,0 but my screen turns white and stops responding.
Can someone look into the code and see where I am making a mistake?
float *vertices;
GLubyte *pindices;
int width, height;
GLuint vboHandlePoints[1];
GLuint indexVBOPoints;
int numberOfPoints;
GLuint buf;
void initVboPoints(){
GLenum err = glewInit();
if (err != GLEW_OK)
{
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
}
glGenBuffers(1, &vboHandlePoints[0]); // create a VBO handle
glBindBuffer(GL_ARRAY_BUFFER, vboHandlePoints[0]); // bind the handle to the current VBO
glBufferData(GL_ARRAY_BUFFER, 1 * 2 * 4, vertices, GL_STATIC_DRAW); // allocate space and copy the data over
glBindBuffer(GL_ARRAY_BUFFER, 0); // clean up
glGenBuffers(1, &indexVBOPoints);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexVBOPoints);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(GLubyte)*1 * 2, pindices, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); //clean up
}
void display(){
glClearColor(0, 1, 0, 1);
glClear(GL_COLOR_BUFFER_BIT);
glColor4f(0, 0, 0, 1);
glPointSize(5);
glEnableClientState(GL_VERTEX_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, vboHandlePoints[0]);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexVBOPoints);
glVertexPointer(2, GL_FLOAT, 0, 0);// 2-dimension
glDrawElements(GL_POINTS, 1, GL_UNSIGNED_BYTE, (char*)NULL + 0);
}
void initializeGlut(int argc, char** argv){
glutInit(&argc, argv);
width = 400;
height = 400;
glutInitDisplayMode(GLUT_RGB | GLUT_SINGLE);
glutInitWindowSize(width, height);
glutCreateWindow("Bhavya's Program");
glutDisplayFunc(display);
}
void initNormal(){
vertices = new float[2];
pindices = new GLubyte[1];
vertices[0] = 0;
vertices[1] = 0;
pindices[0] = 0;
}
int main(int argc, char** argv){
initNormal();
initializeGlut(argc, argv);
initVboPoints();
glutMainLoop();
return 0;
}
You have no GL_ARRAY_BUFFER bound at the time of the glVertexPointer call. All of the gl*Pointer functions reference the currently bound GL_ARRAY_BUFFER, the buffer name becomes part of the pointer. That way, you can source each attribute from a different VBO. The GL_ARRAY_BUFFER binding is totally irrelevant for the glDraw* family of functions.
Since you use deprecated legacy GL, binding VBO 0 is still valid, and your pointer references the client memory. So your application is likely to just crash because you told the GL to read the data at memory address 0...
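In other words, the VBO has to be bound at the moment the pointer is set; a minimal sketch of that order using the question's names:
glBindBuffer(GL_ARRAY_BUFFER, vboHandlePoints[0]);      // captured by the following pointer call
glVertexPointer(2, GL_FLOAT, 0, 0);                     // sources data from vboHandlePoints[0]
glBindBuffer(GL_ARRAY_BUFFER, 0);                        // unbinding afterwards doesn't change the stored pointer
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexVBOPoints);   // must stay bound for glDrawElements
glDrawElements(GL_POINTS, 1, GL_UNSIGNED_BYTE, (char*)NULL + 0);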
The problem is that glFlush() is required at the end of the display function (the window is single-buffered, so nothing appears until the commands are flushed).
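A sketch of the question's display function with that fix added at the end:
void display(){
    glClearColor(0, 1, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT);
    glColor4f(0, 0, 0, 1);
    glPointSize(5);
    glEnableClientState(GL_VERTEX_ARRAY);
    glBindBuffer(GL_ARRAY_BUFFER, vboHandlePoints[0]);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, indexVBOPoints);
    glVertexPointer(2, GL_FLOAT, 0, 0);
    glDrawElements(GL_POINTS, 1, GL_UNSIGNED_BYTE, (char*)NULL + 0);
    glFlush(); // single-buffered window: flush so the point actually appears
}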

Opengl will not display anything

I'm trying to get a small triangle to display.
Here is my initialization code:
void PlayerInit(Player P1)
{
glewExperimental = GL_TRUE;
glewInit();
//initialize buffers needed to draw the player
GLuint vao;
GLuint buffer;
glGenVertexArrays(1, &vao);
glGenBuffers(1, &buffer);
//bind the buffer and vertex objects
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
//Set up a buffer to hold 6 floats for position, and 9 floats for color
glBufferData(GL_ARRAY_BUFFER, sizeof(float)*18, NULL, GL_STATIC_DRAW);
//push the vertices of the player into the buffer
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(float)*9, CalcPlayerPoints(P1.GetPosition()));
//push the color of each player vertex into the buffer
glBufferSubData(GL_ARRAY_BUFFER, sizeof(float)*9, sizeof(float)*9, CalcPlayerColor(1));
//create and compile the vertex/fragment shader objects
GLuint vs = create_shader("vshader.glsl" ,GL_VERTEX_SHADER);
GLuint fs = create_shader("fshader.glsl" ,GL_FRAGMENT_SHADER);
//create a program object and link the shaders
GLuint program = glCreateProgram();
glAttachShader(program, vs);
glAttachShader(program, fs);
glLinkProgram(program);
//error checking for linking
GLint linked;
glGetProgramiv(program, GL_LINK_STATUS, &linked);
if(!linked)
{
std::cerr<< "Shader program failed to link" <<std::endl;
GLint logSize;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logSize);
char* logMsg = new char[logSize];
glGetProgramInfoLog(program, logSize, NULL, logMsg);
std::cerr<< logMsg << std::endl;
delete [] logMsg;
exit(EXIT_FAILURE);
}
glUseProgram(program);
//create attributes for color and position to pass to shaders
//enable each attribute
GLuint Pos = glGetAttribLocation( program, "vPosition");
glEnableVertexAttribArray(Pos);
GLuint Col = glGetAttribLocation( program, "vColor");
glEnableVertexAttribArray(Col);
//set a pointer at the proper offset into the buffer for each attribute
glVertexAttribPointer(Pos, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*) (sizeof(float)*0));
glVertexAttribPointer(Col, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*) (sizeof(float)*9));
}
I don't have much experience writing shader linkers, so I think that is where the problem might be. I have some error checking in the shader loader, and nothing comes up. So I think that is fine.
Next I have my display and main function:
//display function for the game
void GameDisplay( void )
{
//set the background color
glClearColor(1.0, 0.0, 1.0, 1.0);
//clear the screen
glClear(GL_COLOR_BUFFER_BIT);
//Draw the Player
glDrawArrays(GL_TRIANGLES, 0, 3);
glutSwapBuffers();
}
//main function
int main(int argc, char** argv)
{
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);
glutInitWindowSize(500, 500);
glutCreateWindow("Asteroids");
Player P1 = Player(0.0, 0.0);
PlayerInit(P1);
glutDisplayFunc(GameDisplay);
glutMainLoop();
return 0;
}
Vertex shader :
attribute vec3 vPosition;
attribute vec3 vColor;
varying vec4 color;
void main()
{
color = vec4(vColor, 1.0);
gl_Position = vec4(vPosition, 1.0);
}
Fragment shader
varying vec4 color;
void main()
{
gl_FragColor = color;
}
That's all of the relevant code. CalcPlayerPoints just returns a float array of size 9 to hold the triangle coordinates. CalcPlayerColor does something similar.
One last detail that may help with diagnosing the problem: whenever I try to exit the program by closing the application window, I hit a breakpoint in glutMainLoop; however, if I close the console window, it exits fine.
Edit: I added the shaders for reference.
Edit: I am using opengl version 3.1
Without the shaders, we can't tell whether the fault is in the GLSL (bad vertex transformations, etc.).
Have you tried checking glGetError to see whether the problem comes from your initialization code?
Maybe try setting the output of your fragment shader to, say, vec4(1.0, 0.0, 0.0, 1.0) to check whether its normal output is ill-formed.
Your last problem seems to reveal undefined behavior, like a bad memory allocation/deallocation, which may take place in your Player class (by the way, consider passing the object as a reference in your initialization code, because at the moment it may trigger a shallow copy and then a double free of some pointer).
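For the glGetError suggestion, a minimal check you could sprinkle after the setup calls might look like this (just a sketch; placement and message are up to you):
GLenum err = glGetError();
if (err != GL_NO_ERROR)
{
    std::cerr << "GL error 0x" << std::hex << err << " after initialization" << std::endl;
}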
Turns out there was something wrong with how I was returning the array of vertices from the function used to compute them. The rest of the code worked fine after that fix.