OpenGL Shader Program Weird Result

I have created a basic shader program to brush up on my OpenGL GLSL.
On the CPU side I have properly created my shader program (.vp and .fp), linked it and error checked it.
My result when I render is always a black triangle.
Right before I link my program, but after I attach both shaders, I do this:
glBindAttribLocation( program, 0, "vVertexPos" );
glBindAttribLocation( program, 1, "vColor" );
These are the position variable and the color variable in my shaders.
All of this is just a quick run-through, so I'm not worried about ugly code besides the OpenGL calls and shader setup.
struct PosColVertex
{
    float pos[3];
    float color[4];
};
PosColVertex verts[3];
float vPos1[3] = { 0.5f, 0.0f, -1.0f };
float vPos2[3] = { 0.0f, 1.0f, -1.0f };
float vPos3[3] = { -0.5f, 0.0f, -1.0f };
memcpy( verts[0].pos, vPos1, sizeof(float)*3 );
memcpy( verts[1].pos, vPos2, sizeof(float)*3 );
memcpy( verts[2].pos, vPos3, sizeof(float)*3 );
float vColor1[4] = { 1.0f, 0.0f, 0.0f, 1.0f };
float vColor2[4] = { 0.0f, 1.0f, 0.0f, 1.0f };
float vColor3[4] = { 0.0f, 0.0f, 1.0f, 1.0f };
memcpy( verts[0].color, vColor1, sizeof(float)*4 );
memcpy( verts[1].color, vColor2, sizeof(float)*4 );
memcpy( verts[2].color, vColor3, sizeof(float)*4 );
glGenBuffers( 1, &vboHandle );
glBindBuffer( GL_ARRAY_BUFFER, vboHandle );
glBufferData( GL_ARRAY_BUFFER, sizeof(PosColVertex)*3, verts, GL_DYNAMIC_READ );
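Side note: GL_DYNAMIC_READ is the usage hint for buffers the application reads back from GL; for vertex data that is written once and drawn repeatedly, GL_STATIC_DRAW is the conventional hint. Usage is only a performance hint, so it is not the cause of the black triangle, but the more fitting call would be:
glBufferData( GL_ARRAY_BUFFER, sizeof(PosColVertex)*3, verts, GL_STATIC_DRAW );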
For my rendering, this is what I do:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
//use our shader program
glUseProgram( program );
//set which vertices we will be using
glBindBuffer( GL_ARRAY_BUFFER, vboHandle );
glEnableVertexAttribArray( 0 );
glEnableVertexAttribArray( 1 );
//specify our vertex attribute
glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, sizeof( PosColVertex ), (void*)(0) );
//specify our color attribute
glVertexAttribPointer( 1, 4, GL_FLOAT, GL_FALSE, sizeof( PosColVertex ), (void*)(12) );
glPushMatrix();
//draw our triangle
glDrawArrays( GL_TRIANGLES, 0, 3 );
glPopMatrix();
glDisableVertexAttribArray( 0 );
glDisableVertexAttribArray( 1 );
Did I do something wrong with the glVertexAttribPointer calls? I've checked my shader: it works, and it does change the values, since I hard-coded values in there before to test it. But I'm assuming I'm not telling OpenGL on the CPU side how to read my verts properly. Any help?
tri.vp
#version 330
in vec3 vVertexPos;
void main(void)
{
    gl_Position = vec4( vVertexPos.x, vVertexPos.y, vVertexPos.z, 1 );
}
tri.fp
#version 330
out vec4 vFragColor;
in vec4 vColor;
void main(void)
{
    vFragColor = vColor;
}

You can't access attributes inside of fragment shaders, only inside of vertex shaders. This makes sense because you are specifying a color for each vertex, and not for each fragment. So, I'd recommend changing your code to read in the color in the vertex shader and smoothly output it to your fragment shader:
Vertex shader:
in vec3 vVertexPos;
in vec4 vColor;
smooth out vec4 fColor;
void main(void)
{
    gl_Position = vec4( vVertexPos.x, vVertexPos.y, vVertexPos.z, 1 );
    fColor = vColor;
}
Fragment shader:
smooth in vec4 fColor;
out vec4 vFragColor;
void main(void)
{
    vFragColor = fColor;
}

Related

OpenGL ignores Quads and makes them Triangles

This is the second time I'm making a game engine, but I'm a little stuck right now, since I cannot figure out why this is happening: no matter what object I send, OpenGL only draws a white triangle in the center of the screen, like this.
I've even copied the Renderer and Camera objects over from my last engine, and it still acts the same, so I'm guessing it has something to do with the render script.
Renderer:
Renderer2D::Renderer2D(const Shader& shader) {
    this->shader = shader;
    this->Init();
}

Renderer2D::~Renderer2D() {
    glDeleteVertexArrays(1, &this->QuadVAO);
}
void Renderer2D::Render(Texture & texture, iVec2 position, iVec2 size, float rotation, iVec3 color) {
    this->shader.Use();
    iMat4 model;
    using namespace glm;
    model = translate(model, iVec3(position.x, position.y, 0.0f));
    /*
    model = translate(model, iVec3(size.x * 0.5f, size.y * 0.5f, 0.0f));
    model = rotate(model, rotation, iVec3(0.0f, 0.0f, 1.0f));
    model = translate(model, iVec3(size.x * -0.5f, size.y * -0.5f, 0.0f));
    */
    model = glm::scale(model, iVec3(size.x, size.y, 1.0f));
    this->shader.SetMatrix4("model2D", model);
    this->shader.SetVector3f("color2D", color);

    glActiveTexture(GL_TEXTURE0);
    texture.Bind();

    glBindVertexArray(this->QuadVAO);
    glDrawArrays(GL_TRIANGLES, 0, 6);
    glBindVertexArray(0);
}
void Renderer2D::Init() {
    U16 VBO;
    float vertices[] = {
        // Pos      // Tex
        0.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 0.0f,
        0.0f, 0.0f, 0.0f, 0.0f,

        0.0f, 1.0f, 0.0f, 1.0f,
        1.0f, 1.0f, 1.0f, 1.0f,
        1.0f, 0.0f, 1.0f, 0.0f
    };
    glGenVertexArrays(1, &this->QuadVAO);
    glGenBuffers(1, &VBO);

    glBindBuffer(GL_ARRAY_BUFFER, VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glBindVertexArray(this->QuadVAO);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 4, GL_FLOAT, GL_FALSE, 4 * sizeof(float), (void*)0);
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
}
Vertex Shader:
#version 330 core
layout (location = 0) in vec4 vertex; // <vec2 position, vec2 texCoords>
out vec2 TexCoords;
uniform mat4 view2D;
uniform mat4 model2D;
uniform mat4 projection2D;
void main()
{
    TexCoords = vertex.zw;
    gl_Position = projection2D * view2D * mode2Dl * vec4(vertex.xy, 0.0, 1.0);
}
Edit:
Vertex Shader:
#version 330 core
layout (location = 0) in vec4 vertex; // <vec2 position, vec2 texCoords>
out vec2 TexCoords;
uniform mat4 view2D;
uniform mat4 model2D;
uniform mat4 projection2D;
void main()
{
    TexCoords = vertex.zw;
    gl_Position = projection2D * view2D * model2D * vec4(vertex.xy, 0.0, 1.0);
}
Fragment Shader:
#version 330 core
in vec2 TexCoords;
out vec4 fragColor2D; // renamed so the output doesn't clash with the color2D uniform below
uniform sampler2D image2D;
uniform vec3 color2D;
void main()
{
    fragColor2D = vec4(color2D, 1.0) * texture(image2D, TexCoords);
}
Resources.cpp
Shader Resources::LoadShaderFromFile(const char * vertexSource, const char * fragmentSource) {
    using namespace std;
    string vertexCode;
    string fragmentCode;
    try {
        ifstream vertexShaderFile(vertexSource);
        ifstream fragmentShaderFile(fragmentSource);
        stringstream vShaderStream, fShaderStream;
        vShaderStream << vertexShaderFile.rdbuf();
        fShaderStream << fragmentShaderFile.rdbuf();
        vertexShaderFile.close();
        fragmentShaderFile.close();
        vertexCode = vShaderStream.str();
        fragmentCode = fShaderStream.str();
    }
    catch (exception e) {
        cout << "ERROR::SHADER: Failed to read shader files" << std::endl;
    }
    const char *vShaderCode = vertexCode.c_str();
    const char *fShaderCode = fragmentCode.c_str();

    Shader shader;
    shader.Compile(vShaderCode, fShaderCode);
    return shader;
}
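As an aside, std::ifstream reports errors through the stream state rather than exceptions by default, so the catch block above will not fire on a missing file. A minimal sketch of an explicit check, using the same variables as above:
ifstream vertexShaderFile(vertexSource);
ifstream fragmentShaderFile(fragmentSource);
if (!vertexShaderFile.is_open() || !fragmentShaderFile.is_open())
    cout << "ERROR::SHADER: Failed to open shader files" << endl;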
Your shader doesn't even compile. When you declare the matrix uniforms, you use the names model2D, view2D and projection2D:
uniform mat4 view2D;
uniform mat4 model2D;
uniform mat4 projection2D;
But when you use the matrices, the model matrix name is misspelled as mode2Dl:
gl_Position = projection2D * view2D * mode2Dl * vec4(vertex.xy, 0.0, 1.0);
I recommend checking whether the shader objects compiled successfully and whether the program object linked successfully.
Whether the compilation of a shader succeeded can be checked with glGetShaderiv and the parameter GL_COMPILE_STATUS, e.g.:
GLuint shaderObj = .... ;
glCompileShader( shaderObj );

GLint status = GL_TRUE;
glGetShaderiv( shaderObj, GL_COMPILE_STATUS, &status );
if ( status == GL_FALSE )
{
    GLint logLen;
    glGetShaderiv( shaderObj, GL_INFO_LOG_LENGTH, &logLen );
    std::vector< char > log( logLen );
    GLsizei written;
    glGetShaderInfoLog( shaderObj, logLen, &written, log.data() );
    std::cout << "compile error:" << std::endl << log.data() << std::endl;
}
Whether the linking of a program succeeded can be checked with glGetProgramiv and the parameter GL_LINK_STATUS, e.g.:
GLuint progObj = ....;
glLinkProgram( progObj );

GLint status = GL_TRUE;
glGetProgramiv( progObj, GL_LINK_STATUS, &status );
if ( status == GL_FALSE )
{
    GLint logLen;
    glGetProgramiv( progObj, GL_INFO_LOG_LENGTH, &logLen );
    std::vector< char > log( logLen );
    GLsizei written;
    glGetProgramInfoLog( progObj, logLen, &written, log.data() );
    std::cout << "link error:" << std::endl << log.data() << std::endl;
}
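Whether the program would execute in the current GL state can be checked in the same way with glValidateProgram and the parameter GL_VALIDATE_STATUS, e.g.:
glValidateProgram( progObj );

GLint status = GL_TRUE;
glGetProgramiv( progObj, GL_VALIDATE_STATUS, &status );
if ( status == GL_FALSE )
{
    GLint logLen;
    glGetProgramiv( progObj, GL_INFO_LOG_LENGTH, &logLen );
    std::vector< char > log( logLen );
    GLsizei written;
    glGetProgramInfoLog( progObj, logLen, &written, log.data() );
    std::cout << "validate error:" << std::endl << log.data() << std::endl;
}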

Creating a Camera with OpenGL 3.3 and Qt 5.6

I'm quite new to OpenGL and Qt, and so far it has gone well. Rendering a simple triangle with OpenGL 3.3 wasn't that hard, but integrating a camera is. For some reason my triangle disappears! Did I get the math wrong when calculating the matrices? I used these two tutorials as a starting point: http://www.opengl-tutorial.org/beginners-tutorials/tutorial-3-matrices/
https://wiki.qt.io/How_to_use_OpenGL_Core_Profile_with_Qt
My code (the most important parts only):
void GLWidget::initializeGL()
{
    QGLFormat glFormat = QGLWidget::format();
    if ( !glFormat.sampleBuffers() )
        qWarning() << "Could not enable sample buffers";

    // Set the clear color to black
    glClearColor( 0.0f, 0.0f, 0.0f, 1.0f );

    // Prepare a complete shader program…
    if ( !prepareShaderProgram( "A:/Projekte/Qt Workspace/Projects/CGOpenGL/simple.vert", "A:/Projekte/Qt Workspace/Projects/CGOpenGL/simple.frag" ) )
        return;

    /////Matrix calculations/////
    projection.perspective(45.0f, 4.0f / 3.0f, 0.1f, 100.0f);
    view.lookAt(QVector3D(4,3,3), QVector3D(0,0,0), QVector3D(0,1,0));
    model = QMatrix4x4();
    //////////

    // We need us some vertex data. Start simple with a triangle ;-)
    GLfloat points[] = { -0.5f, -0.5f, 0.0f, 1.0f,
                          0.5f, -0.5f, 0.0f, 1.0f,
                          0.0f,  0.5f, 0.0f, 1.0f };
    vertexBuffer.create();
    vertexBuffer.setUsagePattern( QGLBuffer::StaticDraw );
    if ( !vertexBuffer.bind() )
    {
        qWarning() << "Could not bind vertex buffer to the context";
        return;
    }
    vertexBuffer.allocate( points, 3 * 4 * sizeof( float ) );

    // Bind the shader program so that we can associate variables from
    // our application to the shaders
    if ( !shader.bind() )
    {
        qWarning() << "Could not bind shader program to context";
        return;
    }

    // Enable the "vertex" attribute to bind it to our currently bound
    // vertex buffer.
    shader.setAttributeBuffer( "vertex", GL_FLOAT, 0, 4 );
    shader.enableAttributeArray( "vertex" );
}
void GLWidget::paintGL()
{
    // Clear the buffer with the current clearing color
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );

    // Set the MVP variable in the shader
    shader.setUniformValue("MVP", projection * view * model);

    // Draw stuff
    glDrawArrays( GL_TRIANGLES, 0, 3 );
}
Vertex Shader:
#version 330
layout(location = 0) in vec3 vertexPosition_modelspace;
uniform mat4 MVP;
void main( void )
{
    gl_Position = MVP * vec4(vertexPosition_modelspace, 1);
}
You made a call to
shader.enableAttributeArray( "vertex" );
yet named the attribute
vertexPosition_modelspace
in the shader. The names need to be consistent, so try renaming the variable in the shader to "vertex".
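For example, a minimal sketch of the renamed vertex shader (everything else unchanged from the question):
#version 330
layout(location = 0) in vec3 vertex; // was vertexPosition_modelspace
uniform mat4 MVP;
void main( void )
{
    gl_Position = MVP * vec4( vertex, 1 );
}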

OpenGL 4.1 not using entire vertex attributes at locations above 0

I was drawing a 2D image on screen, and it worked until I decided to add a color attribute. When I did, I noticed the shader colored the entire image using only the first four components of my color buffer (which has 24 components).
Initialization:
//These are scaled before drawing
GLfloat vertex_buffer_data[] = {
0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 1.0f,
0.0f, 1.0f, 1.0f
};
glGenBuffers( 1, &m_2DVertexBuffer );
glBindBuffer( GL_ARRAY_BUFFER, m_2DVertexBuffer );
glBufferData( GL_ARRAY_BUFFER, sizeof( vertex_buffer_data ), vertex_buffer_data, GL_STATIC_DRAW );
//Test values to see which values the shader uses. Only the first 4 are used.
GLfloat color_buffer_data[] = {
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 1.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f, 1.0f
};
glGenBuffers( 1, &m_2DColorBuffer );
glBindBuffer( GL_ARRAY_BUFFER, m_2DColorBuffer );
glBufferData( GL_ARRAY_BUFFER, sizeof( color_buffer_data ), color_buffer_data, GL_STATIC_DRAW );
Binding:
glEnableVertexAttribArray( 0 );
glEnableVertexAttribArray( 1 );
glBindBuffer( GL_ARRAY_BUFFER, m_2DVertexBuffer );
glVertexAttribPointer( 0, 3, GL_FLOAT, GL_FALSE, 0, ( void * )0 );
glBindBuffer( GL_ARRAY_BUFFER, m_2DColorBuffer );
glVertexAttribPointer( 1, 4, GL_FLOAT, GL_FALSE, 0, ( void * )0 );
Drawing:
glDrawArrays( GL_TRIANGLE_STRIP, 0, 6 );
Vertex Shader:
#version 410
layout( location = 0 ) in vec3 position;
layout( location = 1 ) in vec4 color;
out vec2 textureCoord;
out vec4 textureColor;
uniform mat4 ProjView;
uniform mat4 ModelMat;
uniform mat4 ViewMat;
void main()
{
    vec4 pos = vec4( position.x, position.y, position.z, 1 );
    textureColor = color;
    textureCoord = pos.xy;
    gl_Position = ProjView * ModelMat * ViewMat * pos;
}
Fragment shader:
#version 410
in vec2 textureCoord;
in vec4 textureColor;
uniform sampler2D texUnit;
out vec4 outputColor;
void main()
{
    outputColor = texture( texUnit, textureCoord ) * textureColor;
}
I'm on OSX 10.9, if it matters.

Opengl deferred lighting shader

I just started learning OpenGL 3.1 and I'm trying to implement deferred shading in my engine (framework?). I wrote simple shaders for the first stage, the lighting stage and the deferred stage.
The lighting stage takes the diffuse color from the deferred texture and saves it in the lighting texture. The deferred stage draws the lighting texture. There is a bug in the lighting shader and the scene is very strange: it looks like this, when it should look like this. Lighting stage vertex shader:
#version 150
in vec4 vertex;
out vec2 position;
void main(void)
{
    gl_Position = vertex*2-1;
    gl_Position.z = 0.0;
    position.xy = vertex.xy;
}
Lighting stage fragment shader:
#version 150
in vec2 position;
uniform sampler2D diffuseTexture;
uniform sampler2D positionTexture;
out vec4 lightingOutput;
void main()
{
    vec4 diffuse = texture(diffuseTexture, vec2(position.x, position.y));
    vec4 position = texture(positionTexture, position.xy);
    vec4 ambient = vec4(0.05, 0.05, 0.05, 1.0) * diffuse;
    lightingOutput = diffuse;
}
That's what I render in lighting stage:
static const GLfloat _vertices[] =
{
    -1.0f, -1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     0.0f,  1.0f, 0.0f,
     0.0f,  1.0f, 0.0f,
     1.0f, -1.0f, 0.0f,
     1.0f,  1.0f, 0.0f,
};
And that's how I render it:
glUseProgram( programID[2] );

glEnableVertexAttribArray(vertexID[1]);
glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
glVertexAttribPointer(
    vertexID[1],
    3,        // size
    GL_FLOAT, // type
    GL_FALSE, // normalized?
    0,        // stride
    (void*)0  // array buffer offset
);

glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, positionTexture);
glUniform1i(positionTextureID[1], 1);

glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, diffuseTexture);
glUniform1i(diffuseTextureID[1], 0);

glDrawArrays( GL_TRIANGLES, 0, 6 );

glDisableVertexAttribArray(vertexID[1]);
If you need all the code it's here www.dropbox.com/s/hvfe4v4pb1pfxb3/code.zip.
How can I fix this strange problem?

Weird behavior of OpenGL / glGetUniformLocation()

I just want to do the basics: give the shaders information from my application. I tried everything and nothing worked, because I can never figure out what is new and what is deprecated in OpenGL.
Vertex Shader:
#version 420 core
layout(location = 0) in vec2 p_rect;
layout(location = 1) in vec2 p_clipRect;
out vec2 texturePoint;
void main()
{
    gl_Position = vec4( p_rect, 0.0, 1.0 );
    texturePoint = p_clipRect;
}
Fragment Shader:
#version 420 core
uniform sampler2D p_texture;
in vec2 texturePoint;
out vec4 outColor;
void main()
{
    outColor = texture( p_texture, texturePoint );
}
OpenGL Code:
glActiveTexture( GL_TEXTURE0 );
glBindTexture( GL_TEXTURE_2D, texture );
GLint texture_id( glGetUniformLocation( programId, "p_texture" ) );
glUniform1i( texture_id, texture );
// Element
static const GLushort element[] = { 0, 1, 2, 3 };
GLuint element_id;
glGenBuffers( 1, &element_id );
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, element_id );
glBufferData( GL_ELEMENT_ARRAY_BUFFER, sizeof( element ), element, GL_STATIC_DRAW );
// Vertex data
struct VertexInput
{
    GLfloat m_rect[ 8 ];
    GLfloat m_clipRect[ 8 ];
}
vertex;
// fill it up with data
GLfloat vertex_data[] =
{
    // m_rect: four corner positions
    -1.0f, -1.0f,
    -1.0f,  1.0f,
     1.0f, -1.0f,
     1.0f,  1.0f,
    // m_clipRect: four texture coordinates
     0.0f,  0.0f,
     0.0f,  1.0f,
     1.0f,  0.0f,
     1.0f,  1.0f,
};
memcpy( &vertex, &vertex_data, sizeof( vertex_data ) );
// VBO
GLuint vertex_id;
glGenBuffers( 1, &vertex_id );
glBindBuffer( GL_ARRAY_BUFFER, vertex_id );
glBufferData( GL_ARRAY_BUFFER, sizeof(vertex), &vertex, GL_STATIC_DRAW );
glEnableVertexAttribArray( 0 );
glEnableVertexAttribArray( 1 );
glVertexAttribPointer( 0, 2, GL_FLOAT, GL_TRUE, 0, (void*)offsetof( VertexInput, m_rect ) );
glVertexAttribPointer( 1, 2, GL_FLOAT, GL_TRUE, 0, (void*)offsetof( VertexInput, m_clipRect ) );
// render the VBO
glBindBuffer( GL_ELEMENT_ARRAY_BUFFER, element_id );
glDrawElements( GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, (void*)0 );
// clean it up ;-)
glDisableVertexAttribArray( 0 );
glDisableVertexAttribArray( 1 );
The weird thing:
Case 1: If programId is the actual program id, I get texture_id = 0, but no information gets to the shader... nothing happens.
Case 2: If programId is anything other than the actual program id, I get texture_id = -1, but my code RENDERS the image perfectly (weird).
The thing is... I know it's still wrong. I need to be able to give information to the shader and then render. I really don't know how case 2 is working, but the fact is that I need to give more information to the shaders, like other textures, the MVP matrix and so on, and I can't. What is wrong? How do I get the correct value of my program id and pass this information to the shader?
glActiveTexture( GL_TEXTURE0 );
...
glUniform1i( texture_id, texture );
Try this instead:
glActiveTexture( GL_TEXTURE0 + 0 );
...
glUniform1i( texture_id, 0 );
Sampler uniforms should be assigned the index of the desired texture unit, not the texture object ID. That also explains the "weird" case 2: with the wrong program id, glGetUniformLocation returns -1, glUniform1i silently ignores a location of -1, and the sampler keeps its default value of 0, which happens to select texture unit GL_TEXTURE0. With the correct program id, the sampler was set to the texture object ID instead, which points at a texture unit with nothing useful bound, so nothing gets sampled.
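To pass more textures, the same pattern repeats per texture unit; a minimal sketch for two textures (the second sampler p_mask and the variable maskTexture are hypothetical, and the program must be bound with glUseProgram before glUniform1i is called):
glUseProgram( programId );

glActiveTexture( GL_TEXTURE0 );
glBindTexture( GL_TEXTURE_2D, texture );
glUniform1i( glGetUniformLocation( programId, "p_texture" ), 0 ); // texture unit 0

glActiveTexture( GL_TEXTURE1 );
glBindTexture( GL_TEXTURE_2D, maskTexture ); // hypothetical second texture
glUniform1i( glGetUniformLocation( programId, "p_mask" ), 1 );    // texture unit 1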