Passing vertex attributes with layouts in geometry shader - opengl

Let's say we have a GL program consisting of the following vertex, geometry and fragment shaders.
vertex:
#version 410
layout (location = 0) in vec2 pos;
layout (location = 1) in vec2 size;
layout (location = 2) in float rot;
layout (location = 3) in vec4 color;
layout (location = 0) out vec2 p;
layout (location = 1) out vec2 s;
layout (location = 2) out float r;
layout (location = 3) out vec4 c;
void main(){
gl_Position = vec4(pos,0.0,1.0);
p = pos;
s = size;
r = rot;
c = color;
}
geometry:
#version 410
layout (points) in;
layout (triangle_strip, max_vertices = 4) out;
layout (location = 0) in vec2 pos;
layout (location = 1) in vec2 size;
layout (location = 2) in float rot;
layout (location = 3) in vec4 color;
layout (location = 0) out vec2 p;
layout (location = 1) out vec2 s;
layout (location = 2) out float r;
layout (location = 3) out vec4 c;
void main()
{
gl_Position = gl_in[0].gl_Position + vec4(-0.2, -0.2, 0.0, 0.0); // 1:bottom-left
EmitVertex();
gl_Position = gl_in[0].gl_Position + vec4( 0.2, -0.2, 0.0, 0.0); // 2:bottom-right
EmitVertex();
gl_Position = gl_in[0].gl_Position + vec4(-0.2, 0.2, 0.0, 0.0); // 3:top-left
EmitVertex();
gl_Position = gl_in[0].gl_Position + vec4( 0.2, 0.2, 0.0, 0.0); // 4:top-right
EmitVertex();
EndPrimitive();
p = pos;
s = size;
r = rot;
c = color;
}
fragment:
#version 410
layout (location = 0) in vec2 pos;
layout (location = 1) in vec2 size;
layout (location = 2) in float rot;
layout (location = 3) in vec4 color;
out vec4 FragColor;
void main()
{
FragColor = color;
}
The program takes points and displays them as quads.
What I'm trying to do here is to pass the four vertex attributes (pos, size, rot and color) to the fragment shader through the geometry shader.
If I remove the geometry shader from the program, the vertex attributes reach the fragment shader and it displays colored dots.
If I keep the geometry shader and remove the layouts (in and out), it displays black quads.
Am I missing something here?

There are a couple of issues in your code. I've renamed all of the variables so you can see what's going on (this probably isn't strictly necessary since you are using layout qualifiers, but the naming makes it obvious which stage each variable belongs to).
#version 410
// per-vertex attributes from the vertex buffers
layout (location = 0) in vec2 vs_pos;
layout (location = 1) in vec2 vs_size;
layout (location = 2) in float vs_rot;
layout (location = 3) in vec4 vs_color;
// per-vertex inputs to the geometry shader
layout (location = 0) out vec2 gs_pos;
layout (location = 1) out vec2 gs_size;
layout (location = 2) out float gs_rot;
layout (location = 3) out vec4 gs_color;
void main(){
gl_Position = vec4(vs_pos,0.0,1.0);
// pass vars from vertex shader, to geometry shader
gs_pos = vs_pos;
gs_size = vs_size;
gs_rot = vs_rot;
gs_color = vs_color;
}
#version 410
layout (points) in;
layout (triangle_strip, max_vertices = 4) out;
// when passed to the geometry shader, these variables are now arrays!
// be sure to declare them as such...
layout (location = 0) in vec2 gs_pos[];
layout (location = 1) in vec2 gs_size[];
layout (location = 2) in float gs_rot[];
layout (location = 3) in vec4 gs_color[];
// the outputs to the fragment shader are presented as
// single attributes though
layout (location = 0) out vec2 fs_pos;
layout (location = 1) out vec2 fs_size;
layout (location = 2) out float fs_rot;
layout (location = 3) out vec4 fs_color;
void main()
{
// pass vars for 1st vertex generated
// (you may be able to get away with setting these once prior
// to the first EmitVertex call, not sure, it's been a while!)
fs_pos = gs_pos[0];
fs_size = gs_size[0];
fs_rot = gs_rot[0];
fs_color = gs_color[0];
gl_Position = gl_in[0].gl_Position + vec4(-0.2, -0.2, 0.0, 0.0);
EmitVertex(); //< emits all params above!
// pass vars for 2nd vertex generated, etc...
fs_pos = gs_pos[0];
fs_size = gs_size[0];
fs_rot = gs_rot[0];
fs_color = gs_color[0];
gl_Position = gl_in[0].gl_Position + vec4( 0.2, -0.2, 0.0, 0.0);
EmitVertex(); //< emits all params above!
fs_pos = gs_pos[0];
fs_size = gs_size[0];
fs_rot = gs_rot[0];
fs_color = gs_color[0];
gl_Position = gl_in[0].gl_Position + vec4(-0.2, 0.2, 0.0, 0.0);
EmitVertex(); //< emits all params above!
fs_pos = gs_pos[0];
fs_size = gs_size[0];
fs_rot = gs_rot[0];
fs_color = gs_color[0];
gl_Position = gl_in[0].gl_Position + vec4( 0.2, 0.2, 0.0, 0.0);
EmitVertex(); //< emits all params above!
EndPrimitive();
}
And finally the fragment shader.
#version 410
// the inputs to the fragment shader would have been interpolated
// across the geometry primitive emitted by the GS.
layout (location = 0) in vec2 fs_pos;
layout (location = 1) in vec2 fs_size;
layout (location = 2) in float fs_rot;
layout (location = 3) in vec4 fs_color;
out vec4 FragColor;
void main()
{
FragColor = fs_color;
}
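For context, here is a minimal sketch of what the matching CPU-side attribute setup might look like, assuming a single interleaved VBO; the struct, buffer and count names below are illustrative, not taken from the question:
// Hypothetical interleaved per-point layout matching locations 0..3 in the shaders.
struct PointVertex {
    float pos[2];   // location 0
    float size[2];  // location 1
    float rot;      // location 2
    float color[4]; // location 3
};
glBindVertexArray(vao);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
glBufferData(GL_ARRAY_BUFFER, sizeof(PointVertex) * pointCount, points, GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, sizeof(PointVertex), (void*)offsetof(PointVertex, pos));
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, sizeof(PointVertex), (void*)offsetof(PointVertex, size));
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 1, GL_FLOAT, GL_FALSE, sizeof(PointVertex), (void*)offsetof(PointVertex, rot));
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 4, GL_FLOAT, GL_FALSE, sizeof(PointVertex), (void*)offsetof(PointVertex, color));
// One input point produces one quad (a 4-vertex triangle strip) from the geometry shader.
glDrawArrays(GL_POINTS, 0, pointCount);
With that in place, each point carries all four attributes into the vertex shader, and the geometry shader forwards them per emitted vertex as shown above.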

Related

OpenGL line width geometry shader

I am trying to implement a geometry shader for line thickness using OpenGL 4.3.
I followed the accepted answer and other solutions given on Stack Overflow, but the result is wrong, as the screenshot shows. Is there a proper way to get the screen-space normal? It looks correct in the first frame, but the moment I move the mouse the camera changes and the offset direction is no longer correct. The shader's camera matrix is updated every iteration of the render loop.
GLSL Geometry shader to replace glLineWidth
Vertex shader
#version 330 core
layout (location = 0) in vec3 aPos;
uniform mat4 projection_view_model;
void main()
{
gl_Position = projection_view_model * vec4(aPos, 1.0);
}
Fragment shader
#version 330 core
//resources:
//https://stackoverflow.com/questions/6017176/gllinestipple-deprecated-in-opengl-3-1
out vec4 FragColor;
uniform vec4 uniform_fragment_color;
void main()
{
FragColor = uniform_fragment_color;
}
Geometry shader
#version 330 core
layout (lines) in;
layout(triangle_strip, max_vertices = 4) out;
uniform float u_thickness ;
uniform vec2 u_viewportSize ;
in gl_PerVertex
{
vec4 gl_Position;
//float gl_PointSize;
//float gl_ClipDistance[];
} gl_in[];
void main() {
//https://stackoverflow.com/questions/54686818/glsl-geometry-shader-to-replace-gllinewidth
vec4 p1 = gl_in[0].gl_Position;
vec4 p2 = gl_in[1].gl_Position;
vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
vec2 offset = vec2(-dir.y, dir.x) * u_thickness*100 / u_viewportSize;
gl_Position = p1 + vec4(offset.xy * p1.w, 0.0, 0.0);
EmitVertex();
gl_Position = p1 - vec4(offset.xy * p1.w, 0.0, 0.0);
EmitVertex();
gl_Position = p2 + vec4(offset.xy * p2.w, 0.0, 0.0);
EmitVertex();
gl_Position = p2 - vec4(offset.xy * p2.w, 0.0, 0.0);
EmitVertex();
EndPrimitive();
}
To get the direction of the line in normalized device space, the x and y components of the clip-space coordinates must be divided by the w component (perspective divide). Replace
vec2 dir = normalize((p2.xy - p1.xy) * u_viewportSize);
with
vec2 dir = normalize((p2.xy / p2.w - p1.xy / p1.w) * u_viewportSize);

Geometry shader odd issue

I want to use a geometry shader to draw the triangles of a mesh, but I have encountered a really odd issue.
The results are shown in the screenshots: the wrong output and the right output.
The only difference between the wrong and right versions is where the 3D position vector is converted to a 4D position vector: the right one does it in the vertex shader, the wrong one does it in the geometry shader.
The code is below. Why does this happen?
#version 330 core
layout (location = 0) in vec3 pos;
layout (location = 1) in vec3 normal;
uniform mat4 model;
uniform mat4 view;
uniform mat4 project;
out vec3 normal_;
out vec4 pos_;
out vec3 pos_bug_;
out mat4 mvp_;
void main()
{
mvp_ = project * view * model;
normal_ = normal;
pos_ = vec4(pos, 1.0);
pos_bug_ = pos;
}
#version 330 core
layout (triangles) in;
layout (line_strip, max_vertices = 12) out;
uniform float length = 0.4f;
out vec4 color;
in mat4 mvp_[];
in vec3 normal_[];
in vec4 pos_[];
in vec3 pos_bug_[];
void GenNormal(int index) {
color = vec4(1, 1, 0, 1);
gl_Position = mvp_[0] * pos_[index];
EmitVertex();
gl_Position = mvp_[0] * pos_[index] + vec4(normal_[index], 0.0) * length;
EmitVertex();
EndPrimitive();
}
void GenTriangle(int index0, int index1) {
color = vec4(1, 1, 1, 1);
gl_Position = mvp_[0] * pos_[index0]; // Right
// gl_Position = mvp_[0] * vec4(pos_bug_[index0], 1.0); // Wrong
EmitVertex();
gl_Position = mvp_[0] * pos_[index1]; // Right
// gl_Position = mvp_[0] * vec4(pos_bug_[index1], 1.0); // Wrong
EmitVertex();
EndPrimitive();
}
void main()
{
GenNormal(0);
GenNormal(1);
GenNormal(2);
GenTriangle(0, 1);
GenTriangle(1, 2);
GenTriangle(0, 2);
}

Specular light erratic in OpenGL

I am working on a shader where the fragment shader should work in the tangent space. It works just as expected for both the ambient and diffuse light, but the specular light is just plain weird. It seems that nearby fragments can have a lot or no light with no obvious reasons.
The vertex shader is:
#version 330 core
layout (location = 0) in vec3 inVertex;
layout (location = 1) in vec3 inNormal;
layout (location = 2) in vec2 inTexture;
layout (location = 3) in vec3 inTangent;
layout (location = 4) in vec3 inBitangent;
out vec3 FragmentPosition;
out vec2 TextureCoordinate;
out vec3 TangentLightDirection;
out vec3 TangentViewPosition;
out vec3 TangentFragmentPosition;
void main()
{
FragmentPosition = vec3(inVertex);
vec3 normal = normalize(inNormal);
gl_Position = vec4( inVertex, 1 );
TextureCoordinate = inTexture;
vec3 tangent = normalize(inTangent);
vec3 biTangent = normalize(inBitangent);
mat3 toTangentSpaceTransformation = transpose(mat3(tangent,biTangent,normal));
TangentFragmentPosition = toTangentSpaceTransformation * FragmentPosition;
TangentLightPosition = toTangentSpaceTransformation * vec3(0,1,1);
TangentFragmentPosition = toTangentSpaceTransformation * vec3(0,0,3);
}
And the fragment shader is:
#version 330 core
out vec4 FragColor;
in vec3 FragmentPosition;
in vec2 TextureCoordinate;
in vec3 TangentLightDirection;
in vec3 TangentViewPosition;
in vec3 TangentFragmentPosition;
uniform sampler2D Texture;
uniform sampler2D normalTexture;
void main() {
vec3 normal = vec3(0,0,1);
float shininess = 4;
vec3 phongVector = vec3(0.3,0.7,1);
vec4 color = texture(Texture,TextureCoordinate);
vec4 ambientLightColor = vec4(1,1,1,1);//vec4(normalOffset,1);
// Calculation of ambient light
vec4 sunLightColor = vec4(1,1,1,1);
vec3 sunLightDirection = normalize(TangentLightPosition);
vec4 ambientLight = phongVector[0] * ambientLightColor;
// Calculation of diffuse light
float diffuseConst = max(dot(normal,sunLightDirection),0.0);
vec4 diffuseLight = phongVector[1] * diffuseConst * sunLightColor;
// Calculation of specular light
vec3 viewDirection = normalize(TangentViewPosition - TangentFragmentPosition);
vec3 reflectionDirection = reflect(-sunLightDirection,normal);
float spec = pow(max(dot(reflectionDirection,viewDirection),0),shininess);
vec4 specularLight = phongVector[2] * spec * sunLightColor;
FragColor = (specularLight)*color;
}
It was a typo: TangentFragmentPosition was initialized twice, while TangentViewPosition was not initialized at all. Initializing TangentViewPosition gave the desired result.

glGetActiveUniform reports uniform exists, but glGetUniformLocation returns -1

I have a vertex shader for which, for some reason, I can't get the location of one of the uniforms.
I use glGetActiveUniform to get all the uniforms available, and my uniform is there (bones[0]).
However, when I call glGetUniformLocation(shaderProgram_, "bones[0]");, it returns -1.
I also tried glGetUniformLocation(shaderProgram_, "bones");, but it also returns -1.
Here's the shader:
#version 330 core
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
uniform mat4 modelMatrix;
uniform mat4 pvmMatrix;
uniform mat3 normalMatrix;
layout (std140) uniform Bones
{
mat4 bones[100];
};
layout (location = 0) in vec3 position;
layout (location = 1) in vec4 color;
layout (location = 2) in vec3 normal;
layout (location = 3) in vec2 textureCoordinate;
layout (location = 4) in ivec4 boneIds;
layout (location = 5) in vec4 boneWeights;
out vec4 ourColor;
out vec2 texCoord;
out vec3 normalDirection;
void main()
{
mat4 bones2[100];
for(int i=0;i<100;++i)
{
bones2[i] = mat4(1.0);
}
// Calculate the transformation on the vertex position based on the bone weightings
mat4 boneTransform = bones2[ boneIds[0] ] * boneWeights[0];
boneTransform += bones2[ boneIds[1] ] * boneWeights[1];
boneTransform += bones2[ boneIds[2] ] * boneWeights[2];
boneTransform += bones2[ boneIds[3] ] * boneWeights[3];
//mat4 tempM = mat4(1.0);
//boneTransform = tempM;
// This is for animating the model
vec4 tempPosition = boneTransform * vec4(position, 1.0);
gl_Position = pvmMatrix * tempPosition;
float sum = boneWeights[0] + boneWeights[1] + boneWeights[2] + boneWeights[3];
if (sum > 1.01f)
gl_Position = pvmMatrix * vec4(position, 1.0);
else if (sum < 0.99f)
gl_Position = pvmMatrix * vec4(position, 1.0);
/*
if (boneIds[0] > 99 || boneIds[0] < 0)
gl_Position = pvmMatrix * vec4(position, 1.0);
else if (boneIds[1] > 99 || boneIds[1] < 0)
gl_Position = pvmMatrix * vec4(position, 1.0);
else if (boneIds[2] > 99 || boneIds[2] < 0)
gl_Position = pvmMatrix * vec4(position, 1.0);
else if (boneIds[3] > 99 || boneIds[3] < 0)
gl_Position = pvmMatrix * vec4(position, 1.0);
*/
// Calculate normal
vec4 normalDirTemp = boneTransform * vec4(normal, 0.0);
normalDirection = normalize(normalMatrix * normalDirTemp.xyz);
//gl_Position = vec4(position, 1.0);
//gl_Position = pvmMatrix * vec4(position, 1.0);
ourColor = color;
texCoord = textureCoordinate;
for(int i=0;i<100;++i)
{
if (bones[1] == mat4(0.0))
ourColor = vec4(0.0, 0.0, 1.0, 1.0);
}
}
I also tried using uniform mat4 bones[100]; in my shader, which did give me a uniform location (0); however, all of the data was mat4(0.0), even though I'm pushing data to the uniform.
Anyone have any ideas?
Uniforms in a uniform block do not have locations. You set their values by binding a buffer object to the appropriate binding index for that block. This is the entire point of putting uniforms in interface blocks.
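As a rough sketch of that approach (the binding point and buffer names are my own, not from the question), filling the Bones block from the application might look something like this:
// Associate the Bones block with an arbitrary binding point (0 here).
GLuint blockIndex = glGetUniformBlockIndex(shaderProgram_, "Bones");
glUniformBlockBinding(shaderProgram_, blockIndex, 0);
// Create a buffer big enough for mat4 bones[100] (std140: each mat4 occupies 64 bytes).
GLuint bonesUbo;
glGenBuffers(1, &bonesUbo);
glBindBuffer(GL_UNIFORM_BUFFER, bonesUbo);
glBufferData(GL_UNIFORM_BUFFER, 100 * 16 * sizeof(float), nullptr, GL_DYNAMIC_DRAW);
glBindBufferBase(GL_UNIFORM_BUFFER, 0, bonesUbo); // attach the buffer to binding point 0
// Whenever the skeleton changes, upload the matrices (boneMatrices: 100 column-major mat4s).
glBindBuffer(GL_UNIFORM_BUFFER, bonesUbo);
glBufferSubData(GL_UNIFORM_BUFFER, 0, 100 * 16 * sizeof(float), boneMatrices);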
In OpenGL you can't query the location of a particular array element directly. Instead, you should call glGetUniformLocation(shaderProgram_, "bones");. This gives you the location of the first element of the array. If you want to access other elements, such as bones[1] or bones[20], you add the desired index to the value returned by glGetUniformLocation.
So the location of bones[0] is retrieved with glGetUniformLocation(shaderProgram_, "bones");, the location of bones[1] is glGetUniformLocation(shaderProgram_, "bones") + 1, and so on.
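If you switch to the plain uniform mat4 bones[100]; declaration instead of the block, a short sketch of the pattern described above (boneMatrices is an illustrative float[100][16] array, and the program must be bound with glUseProgram first):
GLint bonesLoc = glGetUniformLocation(shaderProgram_, "bones"); // location of bones[0]
// Upload a single element, e.g. bones[20]:
glUniformMatrix4fv(bonesLoc + 20, 1, GL_FALSE, boneMatrices[20]);
// Or upload all 100 matrices in one call starting at bones[0]:
glUniformMatrix4fv(bonesLoc, 100, GL_FALSE, boneMatrices[0]);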

GLSL Tessellation Displacement Mapping

In my recent project I am working with hardware-side tessellation. The pipeline I want to implement should take a low-poly mesh, tessellate it and apply a displacement map.
The tessellation works fine and looks just as I expected. However, when I apply the displacement map in the tessellation evaluation shader, I get an output that is somewhat random.
This is the output without displacement (I used the heightmap as a texture to verify that my texture coordinates are accurate).
This is what I get when I enable the displacement (using the same texture for both coloring and displacement):
The shader code is as follows:
//VERTEX SHADER
#version 430
layout(location = 0) in vec4 vertex;
layout(location = 1) in vec4 normal;
layout(location = 2) in vec2 texCoord;
out vec3 vPosition;
out vec3 vNormal;
out vec2 vTexCoord;
void main() {
vPosition = vertex.xyz;
vNormal = normal.xyz;
vTexCoord = texCoord;
}
//TESS CONTROL
#version 430
layout(vertices = 3) out;
in vec3 vPosition[];
in vec3 vNormal[];
in vec2 vTexCoord[];
out vec3 tcPosition[];
out vec3 tcNormal[];
out vec2 tcTexCoord[];
uniform float innerTessLevel;
uniform float outerTessLevel;
void main(){
float inTess = innerTessLevel;
float outTess = outerTessLevel;
tcPosition[gl_InvocationID] = vPosition[gl_InvocationID];
tcNormal[gl_InvocationID] = vNormal[gl_InvocationID];
tcTexCoord[gl_InvocationID] = vTexCoord[gl_InvocationID];
if(gl_InvocationID == 0) {
gl_TessLevelInner[0] = inTess;
gl_TessLevelInner[1] = inTess;
gl_TessLevelOuter[0] = outTess;
gl_TessLevelOuter[1] = outTess;
gl_TessLevelOuter[2] = outTess;
gl_TessLevelOuter[3] = outTess;
}
}
//TESS EVAL
#version 430
layout(triangles, equal_spacing, ccw) in;
in vec3 tcPosition[];
in vec3 tcNormal[];
in vec2 tcTexCoord[];
out vec3 tePosition;
out vec2 teTexCoord;
uniform mat4 ModelViewProjection;
uniform mat4 ModelView;
uniform sampler2D texHeight;
void main(){
vec3 p0 = gl_TessCoord.x * tcPosition[0];
vec3 p1 = gl_TessCoord.y * tcPosition[1];
vec3 p2 = gl_TessCoord.z * tcPosition[2];
vec3 pos = p0 + p1 + p2;
vec3 n0 = gl_TessCoord.x * tcNormal[0];
vec3 n1 = gl_TessCoord.y * tcNormal[1];
vec3 n2 = gl_TessCoord.z * tcNormal[2];
vec3 normal = normalize(n0 + n1 + n2);
vec2 tc0 = gl_TessCoord.x * tcTexCoord[0];
vec2 tc1 = gl_TessCoord.y * tcTexCoord[1];
vec2 tc2 = gl_TessCoord.z * tcTexCoord[2];
teTexCoord = tc0 + tc1 + tc2;
float height = texture(texHeight, teTexCoord).x;
pos += normal * (height * 0.2f);
gl_Position = ModelViewProjection * vec4(pos, 1);
tePosition = vec3(ModelView * vec4(pos,1.0)).xyz;
}
//GEOMETRY
#version 430
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
uniform mat4 ModelView;
in vec3 tePosition[3];
in vec3 tePatchDistance[3];
in vec2 teTexCoord[3];
out vec3 gFacetNormal;
out vec2 gTexCoord;
void main() {
vec3 A = tePosition[2] - tePosition[0];
vec3 B = tePosition[1] - tePosition[0];
vec4 N = vec4( normalize(cross(A, B)) , 0.0);
gFacetNormal = N.xyz;
gTexCoord = teTexCoord[0];
gl_Position = gl_in[0].gl_Position; EmitVertex();
gTexCoord = teTexCoord[1];
gl_Position = gl_in[1].gl_Position; EmitVertex();
gTexCoord = teTexCoord[2];
gl_Position = gl_in[2].gl_Position; EmitVertex();
EndPrimitive();
}
//FRAGMENT
#version 430
layout(location = 0) out vec4 fragColor;
in vec3 gFacetNormal;
in vec2 gTexCoord;
uniform float lit;
uniform vec3 light;
uniform sampler2D texHeight;
void main() {
#ifndef ORANGE_PURPLE
vec3 color = gl_FrontFacing ? vec3(1.0,0.0,0.0) : vec3(0.0,0.0,1.0);
#else
vec3 color = gl_FrontFacing ? vec3(1.0,0.6,0.0) : vec3(0.6,0.0,1.0);
#endif
if (lit > 0.5) {
color = texture(texHeight, gTexCoord).xyz;
vec3 N = normalize(gFacetNormal);
vec3 L = light;
float df = abs(dot(N,L));
color = df * color;
fragColor = vec4(color,1.0);
}
else {
fragColor = vec4(color,1.0);
}
}
It would be nice if someone could help me on that one.
Thanks to @AndonM.Coleman I solved the matter.
In fact, the output gFacetNormal is only defined for the first vertex in the geometry shader. Outputs have to be set again after every EmitVertex(...) as per the GLSL specification, or they will be undefined. Many implementations re-use the last value set, but you cannot rely on that behavior if you want this to work portably. You need to set gFacetNormal once before every EmitVertex. From the specification of void EmitVertex(): "Emits the current values of output variables to the current output primitive. On return from this call, the values of output variables are undefined."
Stupid of me not to notice that, but here is the working code:
//VERTEX
#version 430
layout(location = 0) in vec4 vertex;
layout(location = 1) in vec4 normal;
layout(location = 2) in vec2 texCoord;
out vec3 vPosition;
out vec3 vNormal;
out vec2 vTexCoord;
void main() {
vPosition = vertex.xyz;
vNormal = normal.xyz;
vTexCoord = texCoord;
}
//TESSELLATION CONTROL
#version 430
layout(vertices = 3) out;
in vec3 vPosition[];
in vec3 vNormal[];
in vec2 vTexCoord[];
out vec3 tcPosition[];
out vec3 tcNormal[];
out vec2 tcTexCoord[];
uniform float innerTessLevel;
uniform float outerTessLevel;
void main(){
float inTess = innerTessLevel;
float outTess = outerTessLevel;
tcPosition[gl_InvocationID] = vPosition[gl_InvocationID];
tcNormal[gl_InvocationID] = vNormal[gl_InvocationID];
tcTexCoord[gl_InvocationID] = vTexCoord[gl_InvocationID];
if(gl_InvocationID == 0) {
gl_TessLevelInner[0] = inTess;
gl_TessLevelInner[1] = inTess;
gl_TessLevelOuter[0] = outTess;
gl_TessLevelOuter[1] = outTess;
gl_TessLevelOuter[2] = outTess;
gl_TessLevelOuter[3] = outTess;
}
}
//TESSELLATION EVALUATION
#version 430
layout(triangles, equal_spacing, ccw) in;
in vec3 tcPosition[];
in vec3 tcNormal[];
in vec2 tcTexCoord[];
out vec3 tePosition;
out vec2 teTexCoord;
out vec3 teNormal;
uniform mat4 ModelViewProjection;
uniform mat4 ModelView;
uniform sampler2D texHeight;
void main(){
vec3 p0 = gl_TessCoord.x * tcPosition[0];
vec3 p1 = gl_TessCoord.y * tcPosition[1];
vec3 p2 = gl_TessCoord.z * tcPosition[2];
vec3 pos = p0 + p1 + p2;
vec3 n0 = gl_TessCoord.x * tcNormal[0];
vec3 n1 = gl_TessCoord.y * tcNormal[1];
vec3 n2 = gl_TessCoord.z * tcNormal[2];
vec3 normal = normalize(n0 + n1 + n2);
vec2 tc0 = gl_TessCoord.x * tcTexCoord[0];
vec2 tc1 = gl_TessCoord.y * tcTexCoord[1];
vec2 tc2 = gl_TessCoord.z * tcTexCoord[2];
teTexCoord = tc0 + tc1 + tc2;
float height = texture(texHeight, teTexCoord).x;
pos += normal * (height * 0.5f);
gl_Position = ModelViewProjection * vec4(pos, 1);
teNormal = vec3(ModelView * vec4(normal,0.0)).xyz;
tePosition = vec3(ModelView * vec4(pos,1.0)).xyz;
}
//GEOMETRY
#version 430
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
uniform mat4 ModelView;
in vec3 tePosition[3];
in vec2 teTexCoord[3];
in vec3 teNormal[3];
out vec3 gFacetNormal;
out vec2 gTexCoord;
void main() {
gFacetNormal = teNormal[0];
gTexCoord = teTexCoord[0];
gl_Position = gl_in[0].gl_Position; EmitVertex();
gFacetNormal = teNormal[1];
gTexCoord = teTexCoord[1];
gl_Position = gl_in[1].gl_Position; EmitVertex();
gFacetNormal = teNormal[2];
gTexCoord = teTexCoord[2];
gl_Position = gl_in[2].gl_Position; EmitVertex();
EndPrimitive();
}
//FRAGMENT
#version 430
layout(location = 0) out vec4 fragColor;
in vec3 gFacetNormal;
in vec2 gTexCoord;
uniform float lit;
uniform vec3 light;
uniform sampler2D texHeight;
void main() {
#ifndef ORANGE_PURPLE
vec3 color = gl_FrontFacing ? vec3(1.0,0.0,0.0) : vec3(0.0,0.0,1.0);
#else
vec3 color = gl_FrontFacing ? vec3(1.0,0.6,0.0) : vec3(0.6,0.0,1.0);
#endif
if (lit > 0.5) {
color = texture(texHeight, gTexCoord).xyz;
vec3 N = normalize(gFacetNormal);
vec3 L = light;
float df = abs(dot(N,L));
color = df * color;
fragColor = vec4(color,1.0);
}
else {
fragColor = vec4(color,1.0);
}
}
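One host-side detail worth noting for this pipeline: with layout(vertices = 3) out in the tessellation control shader, the draw call must submit patches rather than plain triangles. A minimal sketch (the VAO/program names and tessellation levels are illustrative):
glPatchParameteri(GL_PATCH_VERTICES, 3); // 3 control points per patch, matching layout(vertices = 3) out
glUseProgram(program);
glUniform1f(glGetUniformLocation(program, "innerTessLevel"), 8.0f); // arbitrary example levels
glUniform1f(glGetUniformLocation(program, "outerTessLevel"), 8.0f);
glBindVertexArray(meshVao);
glDrawArrays(GL_PATCHES, 0, vertexCount); // the TES, geometry and fragment stages run as shown above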