GLSL Shader Draws Only Black Screen LWJGL

I am very new to shaders. I got some GLSL code to compile (at least without any compiler errors), but when I try to render a triangle with LWJGL I only get a black screen, even though the triangle should be white according to the color I pass out of the fragment shader. I've posted snippets of the code below; hopefully there's enough to figure out what the problem is.
Fragment Shader Source Code
void main()
{
gl_FragColor = vec4(1.0f, 1.0f, 1.0f, 1.0f);
}
Vertex Shader Source Code
void main()
{
gl_Position = ftransform();
}
Shader Reader Code
vertShaderString = shaderName + ".vert";
fragShaderString = shaderName + ".frag";
shader = GL20.glCreateProgram();
vertShader = GL20.glCreateShader(GL20.GL_VERTEX_SHADER);
try{
String temp;
BufferedReader reader = new BufferedReader(new FileReader(new File(vertShaderString)));
while ((temp = reader.readLine()) != null){
vertSource.append(temp).append("\n");
}
reader.close();
BufferedReader fragReader = new BufferedReader(new FileReader(new File(fragShaderString)));
String otherTemp;
while ((otherTemp = fragReader.readLine()) != null){
fragSource.append(otherTemp).append("\n");
}
fragReader.close();
}catch (Exception e){
e.printStackTrace();
}
Shader Setup Code
GL20.glShaderSource(vertShader, vertSource);
GL20.glCompileShader(vertShader);
if (GL20.glGetShaderi(vertShader, GL20.GL_COMPILE_STATUS) == GL11.GL_FALSE){
System.err.println("Failed to compile vertex shader");
}
GL20.glShaderSource(fragShader, fragSource);
GL20.glCompileShader(fragShader);
if (GL20.glGetShaderi(fragShader, GL20.GL_COMPILE_STATUS) == GL11.GL_FALSE){
System.err.println("Failed to compile fragment shader");
}
GL20.glAttachShader(shader, vertShader);
GL20.glAttachShader(shader, fragShader);
GL20.glLinkProgram(shader);
GL20.glValidateProgram(shader);
Enable and Disable Shader Code
public void begin(){
GL20.glUseProgram(shader);
}
public void end(){
GL20.glUseProgram(0);
}
Render Method
public void render(){
GL11.glClearColor(0, 0, 0, 1);
GL11.glClear(GL11.GL_COLOR_BUFFER_BIT);
GL11.glColor4f(1, 0, 0, 1);
shader.begin();
GL11.glBegin(GL11.GL_TRIANGLES);
GL11.glVertex2i(0, 0);
GL11.glVertex2i(500, 0);
GL11.glVertex2i(250, 250);
GL11.glEnd();
shader.end();
}

The problem has been solved, thanks to jozxyqk. I had forgotten to create the fragment shader object:
fragShader = GL20.glCreateShader(GL20.GL_FRAGMENT_SHADER);
Adding this line fixed it.
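For reference, a minimal sketch of the corrected setup in the same LWJGL GL20 style as the snippets above (variable names match the question; the only functional change is that both shader objects are created before their source is uploaded, and the info log is printed on failure):
// Create the program and BOTH shader objects before uploading any source.
shader = GL20.glCreateProgram();
vertShader = GL20.glCreateShader(GL20.GL_VERTEX_SHADER);
fragShader = GL20.glCreateShader(GL20.GL_FRAGMENT_SHADER); // the line that was missing

GL20.glShaderSource(vertShader, vertSource);
GL20.glCompileShader(vertShader);
if (GL20.glGetShaderi(vertShader, GL20.GL_COMPILE_STATUS) == GL11.GL_FALSE) {
    // Printing the info log shows the actual compiler error rather than a generic message.
    System.err.println(GL20.glGetShaderInfoLog(vertShader, 1024));
}

GL20.glShaderSource(fragShader, fragSource);
GL20.glCompileShader(fragShader);
if (GL20.glGetShaderi(fragShader, GL20.GL_COMPILE_STATUS) == GL11.GL_FALSE) {
    System.err.println(GL20.glGetShaderInfoLog(fragShader, 1024));
}

GL20.glAttachShader(shader, vertShader);
GL20.glAttachShader(shader, fragShader);
GL20.glLinkProgram(shader);
GL20.glValidateProgram(shader);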

Related

Unable to get Tessellation shader working

I've just started following OpenGL SuperBible 7th ed, translating the examples into LWJGL, but have become stuck on the tessellation shader. In the program below there is a comment reading "//IF THESE TWO LINES...": if the two lines that follow it are commented out, the vertex and fragment shaders work, but when control.tess.glsl and eval.tess.glsl are attached the triangle no longer renders.
I've uploaded my program onto github but will reproduce the code here as well:
package com.ch3vertpipeline;
public class App {
public static void main(String [] args){
LwjglSetup setup = new LwjglSetup();
setup.run();
}
}
package com.ch3vertpipeline;
import java.nio.IntBuffer;
import java.util.Scanner;
import org.lwjgl.*;
import org.lwjgl.glfw.*;
import org.lwjgl.opengl.*;
import org.lwjgl.system.*;
import static org.lwjgl.glfw.Callbacks.*;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.opengl.GL20.*;
import static org.lwjgl.opengl.GL30.*;
import static org.lwjgl.system.MemoryStack.stackPush;
import static org.lwjgl.system.MemoryUtil.NULL;
public class LwjglSetup {
private long window;
private int vertex_shader;
private int fragment_shader;
private int tess_control_shader;
private int tess_evaluation_shader;
private int program;
private int vertex_array_object;
public LwjglSetup() {
}
private void init() {
GLFWErrorCallback.createPrint(System.err).set();
if (!glfwInit()) {
throw new IllegalStateException("Unable to initialize GLFW");
}
// Configure GLFW
glfwDefaultWindowHints(); // optional, the current window hints are already the default
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE); // the window will stay hidden after creation
glfwWindowHint(GLFW_RESIZABLE, GLFW_TRUE); // the window will be resizable
// Create the window
window = glfwCreateWindow(300, 300, "Hello World!", NULL, NULL);
if (window == NULL) {
throw new RuntimeException("Failed to create the GLFW window");
}
// Setup a key callback. It will be called every time a key is pressed, repeated or released.
glfwSetKeyCallback(window, (window, key, scancode, action, mods) -> {
if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE) {
glfwSetWindowShouldClose(window, true); // We will detect this in the rendering loop
}
});
// Get the thread stack and push a new frame
try (MemoryStack stack = stackPush()) {
IntBuffer pWidth = stack.mallocInt(1); // int*
IntBuffer pHeight = stack.mallocInt(1); // int*
// Get the window size passed to glfwCreateWindow
glfwGetWindowSize(window, pWidth, pHeight);
// Get the resolution of the primary monitor
GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
// Center the window
glfwSetWindowPos(
window,
(vidmode.width() - pWidth.get(0)) / 2,
(vidmode.height() - pHeight.get(0)) / 2
);
} // the stack frame is popped automatically
// Make the OpenGL context current
glfwMakeContextCurrent(window);
// Enable v-sync
glfwSwapInterval(1);
// Make the window visible
glfwShowWindow(window);
}
public void run() {
System.out.println("Hello LWJGL " + Version.getVersion() + "!");
init();
loop();
// Free the window callbacks and destroy the window
glfwFreeCallbacks(window);
glfwDestroyWindow(window);
// Terminate GLFW and free the error callback
glfwTerminate();
glfwSetErrorCallback(null).free();
}
private void loop() {
GL.createCapabilities();//Critical
System.out.println("OpenGL Verion: " + glGetString(GL_VERSION));
this.compileShader();
vertex_array_object = glGenVertexArrays();
glBindVertexArray(vertex_array_object);
while (!glfwWindowShouldClose(window)) {
double curTime = System.currentTimeMillis() / 1000.0;
double slowerTime = curTime;//assigned directly, but I was applying a factor here
final float colour[] = {
(float) Math.sin(slowerTime) * 0.5f + 0.5f,
(float) Math.cos(slowerTime) * 0.5f + 0.5f,
0.0f, 1.0f};
glClearBufferfv(GL_COLOR, 0, colour);
glUseProgram(program);
final float attrib[] = {
(float) Math.sin(slowerTime) * 0.5f,
(float) Math.cos(slowerTime) * 0.6f,
0.0f, 0.0f};
//glPatchParameteri(GL_PATCH_VERTICES, 3);//this is the default so is unneeded
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glVertexAttrib4fv(0, attrib);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(window); // swap the color buffers
glfwPollEvents();
}
glDeleteVertexArrays(vertex_array_object);
glDeleteProgram(program);
}
private String readFileAsString(String filename) {
String next = new Scanner(LwjglSetup.class.getResourceAsStream(filename), "UTF-8").useDelimiter("\\A").next();
System.out.println("readFileAsString: " + next);
return next;
}
private void compileShader() {
//int program;
//NEW CODE
//create and compile vertex shader
String vertShaderSource = readFileAsString("/vert.glsl");
vertex_shader = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vertex_shader, vertShaderSource);
glCompileShader(vertex_shader);
//check compilation
if (glGetShaderi(vertex_shader, GL_COMPILE_STATUS) != 1) {
System.err.println(glGetShaderInfoLog(vertex_shader));
System.exit(1);
}
//create and compile fragment shader
String fragShaderSource = readFileAsString("/frag.glsl");
fragment_shader = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fragment_shader, fragShaderSource);
glCompileShader(fragment_shader);
//check compilation
if (glGetShaderi(fragment_shader, GL_COMPILE_STATUS) != 1) {
System.err.println(glGetShaderInfoLog(fragment_shader));
System.exit(1);
}
//create and compile tessellation shader
String tessControlShaderSource = readFileAsString("/control.tess.glsl");
tess_control_shader = glCreateShader(GL40.GL_TESS_CONTROL_SHADER);
glShaderSource(tess_control_shader, tessControlShaderSource);
glCompileShader(tess_control_shader);
//check compilation
if (glGetShaderi(tess_control_shader, GL_COMPILE_STATUS) != 1) {
System.err.println(glGetShaderInfoLog(tess_control_shader));
System.exit(1);
}
//create and compile tessellation shader
String tessEvaluationShaderSource = readFileAsString("/eval.tess.glsl");
tess_evaluation_shader = glCreateShader(GL40.GL_TESS_EVALUATION_SHADER);
glShaderSource(tess_evaluation_shader, tessEvaluationShaderSource);
glCompileShader(tess_evaluation_shader);
//check compilation
if (glGetShaderi(tess_evaluation_shader, GL_COMPILE_STATUS) != 1) {
System.err.println(glGetShaderInfoLog(tess_evaluation_shader));
System.exit(1);
}
//create program and attach it
program = glCreateProgram();
glAttachShader(program, vertex_shader);
glAttachShader(program, fragment_shader);
//IF THESE TWO LINES ARE COMMENTED PROGRAM WORKS...although there
//is no tessellation...
glAttachShader(program, tess_control_shader);
glAttachShader(program, tess_evaluation_shader);
glLinkProgram(program);
//check link
if (glGetProgrami(program, GL_LINK_STATUS) != 1) {
System.err.println(glGetProgramInfoLog(program));
System.exit(1);
}
glValidateProgram(program);
if (glGetProgrami(program, GL_VALIDATE_STATUS) != 1) {
System.err.println(glGetProgramInfoLog(program));
System.exit(1);
}
//delete shaders as the program has them now
glDeleteShader(vertex_shader);
glDeleteShader(fragment_shader);
glDeleteShader(tess_control_shader);
glDeleteShader(tess_evaluation_shader);
//return program;
}
}
vert.glsl
#version 440 core
//'offset' is an input vertex attribute
layout (location=0) in vec4 offset;
layout (location=1) in vec4 color;
out vec4 vs_color;
void main(void)
{
const vec4 vertices[3] = vec4[3]( vec4( 0.25, -0.25, 0.5, 1.0),
vec4(-0.25, -0.25, 0.5, 1.0),
vec4( 0.25, 0.25, 0.5, 1.0));
//Add 'offset' to our hard-coded vertex position
gl_Position = vertices[gl_VertexID] + offset;
//Output a fixed value for vs_color
vs_color = color;
}
frag.glsl
#version 440 core
in vec4 vs_color;
out vec4 color;
void main(void)
{
color = vs_color;
}
control.tess.glsl
#version 440 core
layout (vertices=3) out;
void main(void)
{
//Only if I am invocation 0
if (gl_InvocationID == 0){
gl_TessLevelInner[0] = 5.0;
gl_TessLevelOuter[0] = 5.0;
gl_TessLevelOuter[1] = 5.0;
gl_TessLevelOuter[2] = 5.0;
}
//Everybody copies their input to their output?
gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
}
eval.tess.glsl
#version 440 core
layout (triangles, equal_spacing, cw) in;
void main(void){
gl_Position = (gl_TessCoord.x * gl_in[0].gl_Position) +
(gl_TessCoord.y * gl_in[1].gl_Position) +
(gl_TessCoord.z * gl_in[2].gl_Position);
}
Finally, if it helps, here is some version information, which is printed at the start of the application:
Hello LWJGL 3.1.5 build 1!
OpenGL Verion: 4.4.0 NVIDIA 340.107
glDrawArrays(GL_TRIANGLES, 0, 3);
When you draw something with tessellation, you are drawing patches, not triangles. Hence, you have to specify GL_PATCHES:
glDrawArrays(GL_PATCHES, 0, 3);
//Everybody copies their input to their output?
The reason is that the input vertices and output vertices of the tessellation control shader are not related to each other. The input vertices are taken from the input stream, i.e. your vertex buffers (after being processed by the vertex shader). Their number is specified by the GL_PATCH_VERTICES parameter. Each invocation takes this number of vertices from the buffer. The output vertices are kept internally in the pipeline. Their number is specified by the layout directive. This number can be different from the number of input vertices. They can also have different attributes. I find it more intuitive to think of these vertices as pieces of data instead of actual vertices with a geometric meaning. In some cases, this interpretation might make sense, but definitely not in all.
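Putting the two parts of the answer together, the relevant draw path might look like the following (a sketch in the LWJGL style of the question, with the GL40 class qualified as in the original code; setting GL_PATCH_VERTICES is shown only for clarity, since 3 is already the default):
// With tessellation shaders attached, the draw call must submit patches,
// and the patch size tells the TCS how many input vertices form one patch.
GL40.glPatchParameteri(GL40.GL_PATCH_VERTICES, 3); // default is 3; shown for clarity
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);         // wireframe makes the generated triangles visible
glUseProgram(program);
glDrawArrays(GL40.GL_PATCHES, 0, 3);               // was GL_TRIANGLES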

Vertex shader not compiling due to a non-Ascii character?

So I started using OpenGL with GLEW and GLFW to create a game engine, and I almost immediately ran into a problem when I started working with shaders:
they are either not being used at all, or have no effect if they are.
I have checked my code against plenty of other examples and everything matches up; nothing looks out of place, and I am running out of ideas and patience (I have been trying to figure this out for nearly a month now).
My main core code is here:
#include "headers/Default.hpp"
//Window width and height variables
int windowWidth = 800;
int windowHeight = 600;
float Aspect = (float)windowWidth / (float)windowHeight;
//Buffer width and buffer height
int bufferWidth;
int bufferHeight;
double deltaTime;
double currentTime;
double newTime;
void CalculateDelta()
{
newTime = glfwGetTime();
deltaTime = newTime - currentTime;
currentTime = newTime;
}
//A call back function to get the window size
void UpdateWindowSize(GLFWwindow* window, int width, int height)
{
windowWidth = width;
windowHeight = height;
Aspect = (float)windowWidth / (float)windowHeight;
}
void UpdateFrameBufferSize(GLFWwindow* window, int width, int height)
{
bufferWidth = width;
bufferHeight = height;
}
//Starts on startup and creates an window context and starts the rendering loop
int main()
{
//Creates an engine startup log to keep
CreateStartupLog();
if (!glewInit())
{
WriteStartupLog("ERROR: GLEW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLEW initiated!\n");
}
//If glfw is not initiated for whatever reason we return an error
if (!glfwInit())
{
WriteStartupLog("ERROR: GLFW failed to start\n");
return 1;
}
else
{
WriteStartupLog("INFO: GLFW initiated!\n");
}
////////////////////////////////////////////////////////////////
// Window Section //
////////////////////////////////////////////////////////////////
//glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
//glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
//glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
//Gets the primary monitor of the PC and tells OpenGL to use that monitor
GLFWmonitor* monitor = glfwGetPrimaryMonitor();
const GLFWvidmode* videoMode = glfwGetVideoMode(monitor);
//Creates a GLFW window context that we can work with
GLFWwindow* gameWindow = glfwCreateWindow(windowWidth/*videoMode->width*/, windowHeight/*videoMode->height*/, "FireTech Engine", NULL/*monitor*/, NULL);
//If the game window is not able to be created, prints an error and terminates the program
if (!gameWindow)
{
WriteStartupLog("ERROR: GLFW could not create a window\n");
glfwTerminate();
return 1;
}
else
{
WriteStartupLog("INFO: GLFW created a window!\n\n");
}
//Makes the current context
glfwMakeContextCurrent(gameWindow);
//Sets the window callback function for size
glfwSetWindowSizeCallback(gameWindow, UpdateWindowSize);
glfwSetFramebufferSizeCallback(gameWindow, UpdateFrameBufferSize);
//Initiate GLEW
glewExperimental = GL_TRUE;
glewInit();
////////////////////////////////////////////////////////////////
// Functions to set up various systems of the game engine //
////////////////////////////////////////////////////////////////
//Calls function to create a log file for the game engine
CreateEngineLog();
//Calls the function to compile the default shaders
CompileDefaultShader();
//Calls the function to get and print out hardware and OpenGL version
//PrintHardwareInfo();
////////////////////////////////////////////////////////////////
// Game Code //
////////////////////////////////////////////////////////////////
Sprite testSprite;
//Rendering loop
while (!glfwWindowShouldClose(gameWindow))
{
CalculateDelta();
glClearColor(0.3, 0.6, 1.0, 0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
//Viewport and ortho settings
glViewport(0, 0, windowWidth, windowHeight);
glOrtho(-1, 1, -1 / Aspect, 1 / Aspect, 0, 1);
//Draw a sprite
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F2))
{
testSprite.DebugDraw();
}
else
{
testSprite.Draw();
}
//Draws the stuff we just rendered
glfwSwapBuffers(gameWindow);
glLoadIdentity();
//Polls different events, like input for example
glfwPollEvents();
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_F1))
{
int fps = GetFPS();
printf("FPS: ");
printf("%d\n", fps);
printf("Frequency: ");
printf("%f\n", 1/double(fps));
}
if (GLFW_PRESS == glfwGetKey(gameWindow, GLFW_KEY_ESCAPE))
{
glfwSetWindowShouldClose(gameWindow, 1);
}
}
glfwTerminate();
WriteEngineLog("PROGRAM EXITED: Window closed");
return 0;
}
Here is the shader.cpp code:
#include "../headers/Default.hpp"
string ReadShaderFile(char* path)
{
ifstream shaderFile;
shaderFile.open(path, std::ifstream::in);
string output;
if (shaderFile.is_open())
{
printf("Opened shader file located at: \"%s\"\n", path);
while (!shaderFile.eof())
{
output += shaderFile.get();
}
printf("Successfully read shader file located at: \"%s\"\n", path);
}
else
{
WriteEngineLog("ERROR: Could not read shader file!\n");
}
shaderFile.close();
return output;
}
Shader::Shader()
{
WriteEngineLog("WARNING: There was no path to any GLSL Shader files\n");
}
Shader::Shader(char* VertexShaderPathIn, char* FragmentShaderPathIn)
{
string vertexShaderString = ReadShaderFile(VertexShaderPathIn);
string fragmentShaderString = ReadShaderFile(FragmentShaderPathIn);
//Prints out the string to show the shader's code
printf("\n%s\n", vertexShaderString.c_str());
printf("\n%s\n", fragmentShaderString.c_str());
//Creates the GLchars needed to input the shader code
const GLchar* vertex_shader = vertexShaderString.c_str();
const GLchar* fragment_shader = fragmentShaderString.c_str();
//Creates a vertex shader and compiles it
GLuint vertexShader = glCreateShader(GL_VERTEX_SHADER);
WriteEngineLog("Blank vertex shader created\n");
glShaderSource(vertexShader, 1, &vertex_shader, NULL);
WriteEngineLog("Vertex shader given source\n");
glCompileShader(vertexShader);
//Compilation error checking begins here
GLint isVertexCompiled = 0;
glGetShaderiv(vertexShader, GL_COMPILE_STATUS, &isVertexCompiled);
if (isVertexCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);
//Writes to the engine log with the shader error
WriteEngineLog("ERROR: Vertex shader failed to compile!\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Vertex shader successfully compiled!\n");
//Creates a fragment shader
GLuint fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
WriteEngineLog("Blank fragment shader created\n");
glShaderSource(fragmentShader, 1, &fragment_shader, NULL);
WriteEngineLog("Fragment shader given source\n");
glCompileShader(fragmentShader);
//Compilation error checking begins here
GLint isFragmentCompiled = 0;
glGetShaderiv(fragmentShader, GL_COMPILE_STATUS, &isFragmentCompiled);
if (isFragmentCompiled == GL_FALSE)
{
//Gets the length of the log
GLint maxLength = 0;
glGetShaderiv(vertexShader, GL_INFO_LOG_LENGTH, &maxLength);
//Creates and writes the log to the errorLog
GLchar* errorLog = (GLchar*)malloc(maxLength);
glGetShaderInfoLog(vertexShader, maxLength, &maxLength, &errorLog[0]);
WriteEngineLog("ERROR: Fragment shader failed to compile\n");
printf("%s\n", (char*)errorLog);
//Frees the errorLog allocation
free(errorLog);
//Deletes the shader so it doesn't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog("ERROR: Aborting shader creation.\n");
return;
}
//Writes in the engine log to report successful compilation
WriteEngineLog("Fragment shader successfully compiled!\n");
//Creates the final shader product
this->Program = glCreateProgram();
WriteEngineLog("Blank shader created\n");
glAttachShader(this->Program, vertexShader);
WriteEngineLog("Attatched Vertex shader to the shader\n");
glAttachShader(this->Program, fragmentShader);
WriteEngineLog("Attatched Fragment shader to the shader\n");
glLinkProgram(this->Program);
/*GLint isLinked = 0;
glGetProgramiv(this->Program, GL_LINK_STATUS, (int*)&isLinked);
if (isLinked == GL_FALSE)
{
//Gets the length of the shader info log
GLint maxLength = 0;
glGetProgramInfolog(ShaderOut, GL_INFO_LOG_LENGTH, &maxLength);
//Gets and puts the actual log into a GLchar
std::vector<GLchar> infoLog(maxLength);
glGetProgramInfoLog(ShaderOut, maxLength, &maxLength, &infoLog[0]);
//Deletes programs and shaders so they don't leak
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
WriteEngineLog((string)infoLog);
return;
}*/
WriteEngineLog("Shader linked!\n\n");
WriteEngineLog("INFO: Shader created!\n");
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
}
void Shader::Use()
{
glUseProgram(this->Program);
}
Here is the quad.cpp code:
#include "../headers/Default.hpp"
Quad::Quad()
{
position.x = 0;
position.y = 0;
scale.x = 1;
scale.y = 1;
VertexArray = CreateVertexArray();
}
//Quad constructor with one arg
Quad::Quad(Vector2 Position)
{
position = Position;
VertexArray = CreateVertexArray();
}
//Quad constructor with two args
Quad::Quad(Vector2 Position, Vector2 Scale)
{
position = Position;
scale = Scale;
VertexArray = CreateVertexArray();
}
GLuint Quad::CreateVertexArray()
{
GLfloat Vertices[] =
{
//VERTICES //COLORS //TEXCOORDS
0.5f, 0.5f, 0.0f, 0.0f, 0.0f, 0.0f, //1.0f, 1.0f, //Top Right Vertice
0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 0.0f, //1.0f, 0.0f, //Top Left Vertice
-0.5f, -0.5f, 0.0f, 0.0f, 0.0f, 1.0f//, 0.0f, 0.0f //Bottom Left Vertice
};
GLuint vbo, vao;
glGenVertexArrays(1, &vao);
glGenBuffers(1, &vbo);
glBindVertexArray(vao);
//Copy vertices into the buffer
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(Vertices), Vertices, GL_STATIC_DRAW);
//Attribute Pointers
//Position attribute
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
//Color attribute
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, 6 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
glEnableVertexAttribArray(1);
//Unbinds the VAO
glBindVertexArray(0);
return vao;
}
//Quad debug drawing function
void Quad::DebugDraw()
{
//Use the default shader
DefaultShader.Use();
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBindVertexArray(VertexArray);
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
//glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0); //CAUSING A CRASH AT THE MOMENT
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
//Unbinds the VAO
glBindVertexArray(0);
}
Here is the sprite.cpp code:
#include "../headers/Default.hpp"
Sprite::Sprite()
{
position.x = 0;
position.y = 0;
}
Sprite::Sprite(Texture tex)
{
defaultTexture = tex;
currentTexture = tex;
}
Sprite::Sprite(Texture tex, Vector2 pos)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
}
Sprite::Sprite(Texture tex, Vector2 pos, Vector2 Scale)
{
defaultTexture = tex;
currentTexture = tex;
position = pos;
scale = Scale;
}
void Sprite::Draw()
{
//Binds the default shader again
glBindVertexArray(VertexArray);
//Use the default shader
DefaultShader.Use();
// draw points 0-3 from the currently bound VAO with current in-use shader
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);
}
Here is my vertex shader and fragment shader code (In order):
//Vertex Shader
#version 330 core
layout (location = 0) in vec3 position; // The position variable has attribute position 0
layout (location = 1) in vec3 color;
out vec3 ourColor;
void main()
{
gl_Position = vec4(position, 1.0f); // See how we directly give a vec3 to vec4's constructor
ourColor = color;
}
//Fragment shader
#version 330 core
in vec3 ourColor;
out vec4 color;
void main()
{
color = ourColor;
}
And I'm getting a warning that my shader did not compile... the error is that there is a non-ASCII character at line ZERO of the vertex shader.
I had exactly the same error. This is almost certainly due to Unicode byte order marks (BOMs), or similar non-printing characters generated by text editors.
These are common in the first characters of a Unicode file, but can occur anywhere.
You can programmatically strip these from your shader source strings before compiling, but this could be costly if you are compiling many shaders. See the above link for the data to strip if you go this route.
An alternative is simply to keep the files in ANSI/ASCII format. I am sure most text editors have the facility to set/convert formats, but I will give Notepad++ as an example since it's what I use to edit GLSL:
Open the GLSL file.
Encoding -> Convert to ANSI. (Note that merely hitting "Encode in ANSI" will not strip the characters)
Save the file.
The above should also strip other characters prone to confusing GLSL parsers (and C/C++ in general).
You could inform the user(/developer) the files are in an incorrect format on load in debug builds.
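If you do want to strip these in code rather than fix the files, one option is to read the file as UTF-8 and drop a leading byte order mark before handing the string to the GLSL compiler. A minimal sketch in Java (to match the LWJGL shader readers earlier on this page; a UTF-8 BOM decodes to the single character U+FEFF):
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public final class ShaderSourceLoader {
    // Reads a shader file as UTF-8 and removes a leading byte order mark, if present.
    public static String read(String path) throws IOException {
        String source = new String(Files.readAllBytes(Paths.get(path)), StandardCharsets.UTF_8);
        if (!source.isEmpty() && source.charAt(0) == '\uFEFF') {
            source = source.substring(1); // drop the BOM so the driver's GLSL parser never sees it
        }
        return source;
    }
}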

using openGL ES from C++: error 1281 on glUseProgram

I am trying to move some openGL processing to a C++ class, which is wrapped in an Objective-C class for use with iOS. Most of it seems to work, but I'm not getting the rendering into the frame buffer. When I bracket every openGL call with glGetError() - both in the Objective-C wrapper and the C++ class - I get an error 1281 (GL_INVALID_VALUE) upon calling glUseProgram (from within the C++ method renderTextures.)
(FWIW, this is then followed by GL_INVALID_OPERATION (1282) on two subsequent calls: glUniform1i and glUniformMatrix4fv, which I suppose makes sense if these are associated with the shader program. P.S. I used a custom wrapper function on glGetError that loops until the return value is zero - these are the only three errors I get.)
I can set and retrieve arbitrary values from the frame buffer (using glClearColor and glClear to set them, and glReadPixels to retrieve them), so the frame buffer seems to be set up OK. But the rendering (via glDrawElements) seems to fail, and I am supposing this is related to the error I get on glUseProgram. Notice that the argument _program for glUseProgram gets passed in from the Objective-C wrapper, via the call to MyClass::renderTextures. The value is the same (it's just a handle, right?) but the call fails inside the C++ class.
So... any ideas why glUseProgram fails? Is it how I set up the argument _program? That I'm passing it from Objective-C to C++? (Something about losing access to the context from inside the C++?) Something else that anyone can see?
Code follows below (much based on boilerplate from Xcode)
Objective-C wrapper:
#import "MyClass.h"
// OBJECTIVE-C WRAPPER CLASS
@interface ObjCWrapperClass () {
MyClass *_myObject;
GLuint _program;
GLint _mvpUniform;
GLint _textureUniform;
GLKMatrix4 _modelViewProjectionMatrix;
}
@property EAGLContext *myContext;
@end
@implementation ObjCWrapperClass
-(id)init {
if (self = [super init]) {
self.myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
_myObject = new MyClass();
BOOL result = [self loadShaders];
}
return self;
}
-(void)doRender {
// Has to be in Objective-C
[EAGLContext setCurrentContext:self.queryContext];
// ---- Use C++ ------------------------------
// 1. Create frame buffer
_myObject->createFrameBuffer();
// 2. Get Texture List
_myObject->createTextureList();
// 3. Create the Texture Geometry
_myObject->createTextureGeometry();
// 4. Load textures
_myObject->loadTextures();
if ([NSThread isMainThread]) {
[self doRenderInCPP];
}
else {
dispatch_sync(dispatch_get_main_queue(), ^{
[self doRenderInCPP];
} );
}
_myObject->deleteTextures();
// ---- END C++ ------------------------------
}
-(void)doRenderInCPP
{
// Render textures into framebuffer
_myObject->renderTextures(_program, _mvpUniform, _textureUniform);
}
#pragma mark - OpenGL ES 2 shader compilation
- (BOOL)loadShaders
{
GLuint vertShader, fragShader;
NSString *vertShaderPathname, *fragShaderPathname;
// Create shader program.
_program = glCreateProgram();
// Create and compile vertex shader.
vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
NSLog(@"Failed to compile vertex shader");
return NO;
}
// Create and compile fragment shader.
fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
NSLog(@"Failed to compile fragment shader");
return NO;
}
// Attach vertex shader to program.
glAttachShader(_program, vertShader);
// Attach fragment shader to program.
glAttachShader(_program, fragShader);
// Bind attribute locations.
// This needs to be done prior to linking.
glBindAttribLocation(_program, GLKVertexAttribPosition, "position");
glBindAttribLocation(_program, GLKVertexAttribTexCoord0, "texCoord");
// Link program.
if (![self linkProgram:_program]) {
NSLog(#"Failed to link program: %d", _program);
if (vertShader) {
glDeleteShader(vertShader);
vertShader = 0;
}
if (fragShader) {
glDeleteShader(fragShader);
fragShader = 0;
}
if (_program) {
glDeleteProgram(_program);
_program = 0;
}
return NO;
}
// Get uniform locations.
_mvpUniform = glGetUniformLocation(_program, "modelViewProjectionMatrix");
_textureUniform = glGetUniformLocation(_program, "tileTexture");
// Release vertex and fragment shaders.
if (vertShader) {
glDetachShader(_program, vertShader);
glDeleteShader(vertShader);
}
if (fragShader) {
glDetachShader(_program, fragShader);
glDeleteShader(fragShader);
}
return YES;
}
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
GLint status;
const GLchar *source;
source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
if (!source) {
NSLog(#"Failed to load vertex shader");
return NO;
}
*shader = glCreateShader(type);
glShaderSource(*shader, 1, &source, NULL);
glCompileShader(*shader);
#if defined(DEBUG)
GLint logLength;
glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetShaderInfoLog(*shader, logLength, &logLength, log);
NSLog(#"Shader compile log:\n%s", log);
free(log);
}
#endif
glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
if (status == 0) {
glDeleteShader(*shader);
return NO;
}
return YES;
}
- (BOOL)linkProgram:(GLuint)prog
{
GLint status;
glLinkProgram(prog);
#if defined(DEBUG)
GLint logLength;
glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar *log = (GLchar *)malloc(logLength);
glGetProgramInfoLog(prog, logLength, &logLength, log);
NSLog(#"Program link log:\n%s", log);
free(log);
}
#endif
glGetProgramiv(prog, GL_LINK_STATUS, &status);
if (status == 0) {
return NO;
}
return YES;
}
@end
C++ (Relevant bits):
//
// MyClass.cpp
//
#include "MyClass.h"
void MyClass::createFrameBuffer()
{
glGenFramebuffers(1, &_frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer);
// Create the texture:
glGenTextures(1, &_frameBufferTexture);
glBindTexture(GL_TEXTURE_2D, _frameBufferTexture);
glTexImage2D(GL_TEXTURE_2D, 0, _drawFormatEnum, _destinationSizeWidth, _destinationSizeHeight, 0, _drawFormatEnum, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _frameBufferTexture, 0);
GLenum error = glGetError();
if (error != 0) {
printf("Error Creating Depth Buffer: %i (backing size: %i %i)\n", error, _destinationSizeWidth, _destinationSizeHeight);
}
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
{
printf("Failed to make complete framebuffer object %x\n", glCheckFramebufferStatus(GL_FRAMEBUFFER));
}
glClearColor(0.015625, 0.03125, 0.0, 1.0); // For testing - put distinctive values in to see if we find these in Framebuffer
glClear(GL_COLOR_BUFFER_BIT);
}
void MyClass::renderTextures(GLint program, GLint mvpUniform, GLint textureUniform)
{
// Clear the draw buffer
glClearColor(0.0, 0.0, 0.0625, 1.0); // TEST: clear to distinctive values
glClear(GL_COLOR_BUFFER_BIT);
// Draw each segment in a different area of frame buffer
for (int segment_index = 0; segment_index < _numSegments; segment_index++) {
// Set draw region
glScissor(segment_index*(_segmentWidthPixels), 0, _segmentWidthPixels, _segmentHeightPixels);
glEnable(GL_SCISSOR_TEST);
int segment_horz_offset = getSegmentHorzOffset(segment_index);
int segment_vert_offset = getSegmentVertOffset(segment_index);
FFGLKMatrix4 modelViewProjectionMatrix = createMVPmatrix(segment_horz_offset, segment_vert_offset);
// Render the object ES2
glUseProgram(program); // Error after glUseProgram:, GL_INVALID_VALUE (1281)
glUniform1i(textureUniform, 0); //GL_INVALID_OPERATION (1282)
glUniformMatrix4fv(mvpUniform, 1, 0, modelViewProjectionMatrix.m); //GL_INVALID_OPERATION (1282)
glEnableVertexAttribArray(FFGLKVertexAttribPosition);
glEnableVertexAttribArray(FFGLKVertexAttribTexCoord0);
glActiveTexture(GL_TEXTURE0);
for (auto &texture: _textures) {
uint8_t *data = (uint8_t *)texture.geometryData;
glVertexAttribPointer(FFGLKVertexAttribPosition, 2, GL_FLOAT, 0, sizeof(float)*4, data);
glVertexAttribPointer(FFGLKVertexAttribTexCoord0, 2, GL_FLOAT, 0, sizeof(float)*4, data+8);
glBindTexture(GL_TEXTURE_2D, texture.getTextureID());
glDrawElements(GL_TRIANGLE_STRIP, _textureVertexIndicesCount, GL_UNSIGNED_SHORT, _textureVertexIndices);
}
glDisable((GL_SCISSOR_TEST));
// Test - are correct values rendered into the frame buffer?
uint8_t *outdata = new uint8_t[100*4];
glReadPixels(0, 0, (GLsizei)2, (GLsizei)4, GL_RGBA, GL_UNSIGNED_BYTE, outdata);
for (int i=0; i < 8; i++) {
printf("render: Value: %i\n", outdata[i]); // Prints values as specified in glClearColor above (0,0,16,255)
}
printf("glGetError: %d\n", glGetError() );
delete [] outdata;
}
}
Error 1281 resolved (OpenGL newbie mistake) - I needed to set the context:
(Still not rendering into frame buffer, but another hurdle cleared.)
-(id)init {
if (self = [super init]) {
self.myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:self.myContext]; // <-- ADDED
_myObject = new MyClass();
BOOL result = [self loadShaders];
}
return self;
}

Implementing a fragment shader that uses a uniform Sampler2D (lwjgl)

I am unable to successfully run a shader, and I seem to be missing some step to make it all work. I end up with the error of:
Exception in thread "main" org.lwjgl.opengl.OpenGLException: Invalid operation (1282)
at org.lwjgl.opengl.Util.checkGLError(Util.java:59)
at org.lwjgl.opengl.GL20.glUniform1i(GL20.java:374)
at sprites.Sprite.draw(Sprite.java:256)
at gui.Game.drawFrame(Game.java:238)
at gui.Game.gameLoop(Game.java:205)
at gui.Game.startGame(Game.java:244)
at tests.simple.SimpleShader.main(SimpleShader.java:36)
My initialization begins with:
int frag = FilterLoader.createShader("/tests/resources/shaders/grayscale.frag", GL20.GL_FRAGMENT_SHADER);
and the createShader method looks like the following:
int shader = GL20.glCreateShader(type);
if(shader == 0)
return 0;
StringBuilder code = new StringBuilder("");
String line;
try
{
String path = FilterLoader.class.getResource(filename).getPath();
BufferedReader reader = new BufferedReader(new FileReader(path));
while((line = reader.readLine()) != null)
{
code.append(line + "\n");
}
}
catch(Exception e)
{
e.printStackTrace();
System.err.println("Error reading in " + type + " shader");
return 0;
}
GL20.glShaderSource(shader, code);
GL20.glCompileShader(shader);
return shader;
I then attach the shader to the specific Sprite with:
two.addFragmentShader(frag); //two is a Sprite
which is just simply:
fragmentShader = fragment_shader;
GL20.glAttachShader(shader, fragment_shader);
GL20.glLinkProgram(shader);
The int shader has previously been initialized in the Sprite's constructor with:
shader = GL20.glCreateProgram();
That was an earlier problem, but is obviously no longer the issue. Now to where the actual error occurs: in the Sprite's (two, in this case) draw method, which looks like this:
if(true)
{
GL20.glUseProgram(shader);
}
glPushMatrix();
glActiveTexture(GL13.GL_TEXTURE0);
imageData.getTexture().bind();
//The line below is where the error occurs.
GL20.glUniform1i(fragmentShader, GL13.GL_TEXTURE0);
int tx = (int)location.x;
int ty = (int)location.y;
glTranslatef(tx, ty, location.layer);
float texture_X = ((float)which_column/(float)columns);
float texture_Y = ((float)which_row/(float)rows);
float texture_XplusWidth = ((float)(which_column+wide)/(float)columns);
float texture_YplusHeight = ((float)(which_row+tall)/(float)rows);
glBegin(GL_QUADS);
{
GL11.glTexCoord2f(texture_X, texture_Y);
glVertex2f(0, 0);
GL11.glTexCoord2f(texture_X, texture_YplusHeight);
glVertex2f(0, getHeight());
GL11.glTexCoord2f(texture_XplusWidth, texture_YplusHeight);
glVertex2f(getWidth(), getHeight());
GL11.glTexCoord2f(texture_XplusWidth, texture_Y);
glVertex2f(getWidth(), 0);
}
glEnd();
GL20.glUseProgram(0);
glPopMatrix();
And the error occurs at this line:
GL20.glUniform1i(fragmentShader, GL13.GL_TEXTURE0);
And for reference my shader:
// simple fragment shader
uniform sampler2D texture;
void main()
{
vec4 color, texel;
color = gl_Color;
texel = texture2DRect(texture, gl_TexCoord[0].xy);
color *= texel;
float gray = dot(color.rgb, vec3(0.299, 0.587, 0.144));
gl_FragColor = vec4(gray, gray, gray, color.a);
}
I've gone through the tutorials, read about the error, and I can't figure out what step I have missed.
GL20.glUniform1i(fragmentShader, GL13.GL_TEXTURE0);
This is wrong. The first parameter of glUniform1i is the uniform location, which you can get with glGetUniformLocation.
The second parameter is an integer, but for a texture sampler you need to pass the texture unit index (0, 1, 2, etc.) and bind the texture to that texture unit, for example:
glUseProgram(program);
int loc = glGetUniformLocation(program, "texture");
glUniform1i(loc, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, texId);
Then it should work.
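Translated into the LWJGL calls used in the question (a sketch only; "texture" must match the uniform name declared in the fragment shader, and shader is the linked program id from the Sprite), the draw method would do something like:
GL20.glUseProgram(shader);
// Look the uniform up on the linked *program*, not on the fragment shader object.
int textureLocation = GL20.glGetUniformLocation(shader, "texture");
// The value is the texture unit index (0 for GL_TEXTURE0), not the GL_TEXTURE0 enum.
GL20.glUniform1i(textureLocation, 0);
GL13.glActiveTexture(GL13.GL_TEXTURE0);
imageData.getTexture().bind(); // binds to the active unit 0
// ... submit the quad as before ...
GL20.glUseProgram(0);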

What is causing OpenGL 2.0 based shader handling code to crash in ::glBegin()?

For what it's worth:
- my graphics drivers (ATI Mobility Radeon 4570) are up to date;
- I have managed to get ARB-extension-based shader handling code working on the same config previously (although it had been a while since I came back to shaders);
- I was following the Lighthouse3d tutorials for my setup (loosely: the Lighthouse3d shader setup; the shader compile log code was taken from an SO question - sorry about the lack of credit, I can't seem to find it now).
Basically, all my OpenGL operations seem to execute successfully; the shaders compile and the program links. As you can see below, I have ended up checking OpenGL errors after each one of the gl*** calls. The shaders themselves are trivial.
Then again, of course, I might have overlooked something ridiculously simple.
#define GLOP(operation) operation; if(!GFX::CheckError(#operation)) exit(1)
// GFX::CheckError() prints an error message, if any.
GFX::Init(640, 480, 24, 0);
GLOP(Texture2D::Enable());
// shader setup
GLuint hFrag = GLOP(::glCreateShader(GL_FRAGMENT_SHADER));
GLuint hVert = GLOP(::glCreateShader(GL_VERTEX_SHADER));
GLuint hProg = GLOP(::glCreateProgram());
std::ifstream inFile;
ReadOpen("shader.frag", inFile);
std::string str(ReadFile(inFile));
const char* pSource = str.c_str();
GLOP(::glShaderSource(hFrag, 1, &pSource, 0));
GLOP(::glCompileShader(hFrag));
GLint logLength;
glGetShaderiv(hFrag, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar* log = (GLchar*)malloc(logLength);
glGetShaderInfoLog(hFrag, logLength, &logLength, log);
printf("Shader compile log:\n%s\n", log);
free(log);
}
XR::ReadOpen(core.GetPath() + "shader.vert", inFile);
XRLOG(XR::GetFileSize(inFile) << " bytes in file." << std::endl);
str = XR::ReadFile(inFile);
pSource = str.c_str();
GLOP(::glShaderSource(hVert, 1, &pSource, 0));
GLOP(::glCompileShader(hVert));
glGetShaderiv(hVert, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar* log = (GLchar*)malloc(logLength);
glGetShaderInfoLog(hVert, logLength, &logLength, log);
printf("Shader compile log:\n%s\n", log);
free(log);
}
GLOP(::glAttachShader(hProg, hFrag));
GLOP(::glAttachShader(hProg, hVert));
GLOP(::glLinkProgram(hProg));
glGetProgramiv(hProg, GL_INFO_LOG_LENGTH, &logLength);
if (logLength > 0) {
GLchar* log = (GLchar*)malloc(logLength);
glGetProgramInfoLog(hProg, logLength, &logLength, log);
printf("Program link log:\n%s\n", log);
free(log);
}
GLOP(::glUseProgram(hProg));
// get uniform's location
GLint locTex0 = GLOP(::glGetUniformLocation(hProg, "tex0"));
/// [loading image, creating texture goes here. works perfectly.]
while(core.IsRunning())
{
GLOP(::glActiveTexture(GL_TEXTURE0));
GLOP(pTex->Bind());
GLOP(::glUniform1i(locTex0, GL_TEXTURE0));
GLOP(::glPushMatrix());
GLOP(::glTranslatef(GFX::GetFlopWidth() / 2, GFX::GetHeight() / 2, .0f)); // still no errors
::glBegin(GL_TRIANGLE_FAN); // crash
::glTexCoord2f(.0f, 1.0f);
::glVertex2f(-100.0f, -100.0f);
::glTexCoord2f(1.0f, 1.0f);
::glVertex2f(100.0f, -100.0f);
::glTexCoord2f(1.0f, .0f);
::glVertex2f(100.0f, 100.0f);
::glTexCoord2f(.0f, .0f);
::glVertex2f(-100.0f, 100.0f);
::glEnd();
::glPopMatrix();
}
Vertex shader:
void main( void )
{
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
gl_TexCoord[0] = gl_MultiTexCoord0;
}
Fragment shader:
uniform sampler2D tex0;
void main()
{
vec2 texCoords = gl_TexCoord[0].st;
vec3 pixel = texture2D(tex0, texCoords).xyz;
gl_FragColor = vec4(pixel, 1.0);
}
Output:
Successfully set video mode 640x480#24
Shader compile log:
Fragment shader was successfully compiled to run on hardware.
Shader compile log:
Vertex shader was successfully compiled to run on hardware.
Program link log:
Vertex shader(s) linked, fragment shader(s) linked.
Does it crash on the first frame?
I kind of doubt this has anything to do with your crash, but you should know that you're calling glVertex/glTexcoord in the wrong order. glVertex terminates a vertex, so in your case the first glVertex has no texcoord set, and your last texcoord gets applied the next loop through.
Immediate mode for submitting vertices like this isn't supported in the core profile, so I'm wondering if you're specifying the core profile when setting up your OpenGL context. That would explain why it dies at glBegin.
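To make that last point concrete: glBegin/glEnd only exist in a legacy or compatibility context; in a core profile those entry points are simply not there. As an illustration only (the poster's GFX::Init code isn't shown, so this borrows the LWJGL/GLFW window-hint style used earlier in this thread rather than their actual setup), the choice is made when the context is requested:
// Requesting a core profile removes immediate mode entirely:
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);   // glBegin would be absent here

// A compatibility profile (or not requesting any profile at all)
// keeps the old fixed-function and immediate-mode calls available:
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);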