I'm porting an OpenGL application to WebAssembly using Emscripten. I've written a bunch of shaders in GLSL (330) for the native version, but for the web version I need shaders written in GLSL ES (300 es). How would I go about converting my shaders from GLSL to GLSL ES?
Possibilities I have considered so far:
GLSL -> SPIR-V -> GLSL ES
having a bunch of #ifdef statements in the GLSL code so that blocks of code only compile for GLSL ES or for desktop GLSL (see the sketch after this list)
writing custom C++ code that dynamically generates GLSL or GLSL ES source depending on what is needed
simply keeping two nearly identical copies of all the shaders, one in GLSL and the other in GLSL ES
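For the #ifdef route, note that #version must be the very first line and cannot itself be wrapped in a preprocessor conditional, so a common pattern is to have the shader loader prepend the appropriate #version line and then branch on the GL_ES macro that the GLSL ES compiler predefines. A minimal fragment-shader sketch of that idea (illustrative only, not one of the shaders below):
// The application prepends the version line before compiling:
//   native build:      "#version 330 core\n"
//   Emscripten build:  "#version 300 es\n"
#ifdef GL_ES                 // predefined to 1 by GLSL ES compilers
precision highp float;       // GLSL ES fragment shaders require a default float precision
#endif
in vec3 normalViewSpace;
out vec4 FragColor;
void main()
{
    vec3 n = normalize(normalViewSpace);
    FragColor = vec4(n * 0.5 + 0.5, 1.0);
}
In practice much of this 330 core code compiles unchanged under 300 es, so the #ifdef blocks tend to stay small.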
Example of GLSL vertex shader:
#version 330 core
#define NR_LIGHTS 10
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
out vec3 normalViewSpace;
out vec3 posViewSpace;
out vec2 textureCoords;
out vec4 positionsLightSpace[NR_LIGHTS];
uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform mat4 lightMatrices[NR_LIGHTS];
void main()
{
vec4 posViewSpaceV4;
posViewSpaceV4 = viewMatrix * modelMatrix * vec4(position, 1.0);
posViewSpace = posViewSpaceV4.xyz;
gl_Position = projectionMatrix * posViewSpaceV4;
normalViewSpace = mat3(viewMatrix) * normalMatrix * normal;
for( int i = 0; i
Example of GLSL fragment shader:
#version 330 core
#define NR_LIGHTS 10
struct Material {
vec3 ambient;
vec3 diffuse;
vec3 specular;
float shininess;
float alpha;
};
struct Light {
vec3 posViewSpace;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
vec3 directionViewSpace;
float cutOff;
float outerCutOff;
sampler2D shadowMap;
};
out vec4 FragColor;
in vec3 normalViewSpace;
in vec3 posViewSpace;
in vec4 positionsLightSpace[NR_LIGHTS];
uniform Material material;
uniform Light lights[NR_LIGHTS];
float shadowCalculation(vec4 posLightSpace, sampler2D shadowMap, Light light)
{
// perform perspective divide
vec3 projCoords = posLightSpace.xyz / posLightSpace.w; // range [-1, 1]
// transform range [0, 1]
projCoords = projCoords * 0.5 + 0.5;
float closestDepth = texture(shadowMap, projCoords.xy).r;
float currentDepth = projCoords.z;
vec3 lightDir = normalize(light.posViewSpace - posViewSpace);
float bias = max(0.00005 * (1.0 - dot(normalViewSpace, lightDir)), 0.000005); // solves shadow acne
float shadow = currentDepth - bias > closestDepth ? 1.0 : 0.0;
return shadow;
}
vec3 calcSpotLight( Light light, vec3 normal, vec3 position, float shadow) // normal and position in view space, although this function should not care about which space it's in
{
vec3 result = vec3(0.0, 0.0, 0.0);
vec3 lightDir = normalize(light.posViewSpace - position);
float theta = dot(lightDir, normalize(-light.directionViewSpace));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0); // interpolate between inner and outer cutOff and clamp to 0 and 1
if( intensity > 0 ) // if inside spot radius
{
// attenuation
float distance = length(light.posViewSpace - position);
float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
if( attenuation > 0.001 )
{
// ambient
vec3 ambient = material.ambient * light.ambient;
// diffuse
vec3 norm = normalize(normalViewSpace);
float diff = max(dot(norm, lightDir), 0.0);
vec3 diffuse = diff * material.diffuse * light.diffuse;
// specular
vec3 viewDir = normalize(-position); // in view space the camera is at (0, 0, 0)
vec3 reflectDir = reflect(-lightDir, norm); // the reflect function expects the vector FROM the light source TO the position
float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
vec3 specular = material.specular * spec * light.specular;
// result
result = intensity * attenuation * (ambient + (1.0 - shadow) * (diffuse + specular));
}
}
return result;
}
void main()
{
vec3 result = material.ambient * 0.08;
for( int i = 0; i
I'm having issues with passing my texture coordinates correctly through a geometry shader. My texture coordinates get all messed up by the time they reach my fragment shader.
Here's the vertex shader:
#version 330 core
layout(location = 0) in vec3 position;
layout(location = 1) in vec3 normal;
layout(location = 2) in vec2 texCoord;
out VS_OUT
{
vec3 v_Normal;
vec2 v_TexCoord;
vec3 v_FragPos;
} vs_out;
uniform mat4 u_Model;
uniform mat4 u_View;
uniform mat4 u_Projection;
void main()
{
gl_Position = u_Projection * u_View * u_Model * vec4(position, 1.0);
vs_out.v_Normal = normalize(mat3(transpose(inverse(u_Model))) * normal);
vs_out.v_TexCoord = texCoord;
vs_out.v_FragPos = vec3(u_Model * vec4(position, 1.0));
}
Geometry shader:
#version 330 core
layout(triangles) in;
layout(triangle_strip, max_vertices = 3) out;
in VS_OUT
{
vec3 v_Normal;
vec2 v_TexCoord;
vec3 v_FragPos;
} gs_in[];
out GS_OUT
{
vec3 v_Normal;
vec2 v_TexCoord;
vec3 v_FragPos;
} gs_out;
uniform float u_Time;
vec4 explode(vec4 position, vec3 normal)
{
float magnitude = 2.0;
vec3 direction = normal * ((sin(u_Time) + 1.0) / 2.0) * magnitude;
return position + vec4(direction, 0.0);
}
vec3 GetNormal()
{
vec3 a = vec3(gl_in[0].gl_Position) - vec3(gl_in[1].gl_Position);
vec3 b = vec3(gl_in[2].gl_Position) - vec3(gl_in[1].gl_Position);
return normalize(cross(a, b));
}
void main()
{
vec3 normal = GetNormal();
gl_Position = explode(gl_in[0].gl_Position, normal);
gs_out.v_Normal = gs_in[0].v_Normal;
gs_out.v_TexCoord = gs_in[0].v_TexCoord;
gs_out.v_FragPos = vec3(explode(vec4(gs_in[0].v_FragPos, 1.0), normal));
EmitVertex();
gl_Position = explode(gl_in[1].gl_Position, normal);
gs_out.v_Normal = gs_in[1].v_Normal;
gs_out.v_TexCoord = gs_in[1].v_TexCoord;
gs_out.v_FragPos = vec3(explode(vec4(gs_in[1].v_FragPos, 1.0), normal));
EmitVertex();
gl_Position = explode(gl_in[2].gl_Position, normal);
gs_out.v_Normal = gs_in[1].v_Normal;
gs_out.v_TexCoord = gs_in[1].v_TexCoord;
gs_out.v_FragPos = vec3(explode(vec4(gs_in[1].v_FragPos, 1.0), normal));
EmitVertex();
EndPrimitive();
}
Fragment shader:
#version 330 core
struct Material
{
sampler2D ambient;
sampler2D diffuse;
sampler2D specular;
sampler2D emissive;
float shininess;
};
struct DirectionalLight {
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
struct PointLight {
vec3 position;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
};
struct SpotLight {
vec3 position;
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float constant;
float linear;
float quadratic;
float cutOff;
float outerCutOff;
};
out vec4 color;
in GS_OUT
{
vec3 v_Normal;
vec2 v_TexCoord;
vec3 v_FragPos;
} fs_in;
#define NR_POINT_LIGHTS 1
uniform Material u_Material;
uniform DirectionalLight u_DirectionalLight;
uniform PointLight u_PointLights[NR_POINT_LIGHTS];
uniform SpotLight u_SpotLight;
uniform vec3 u_ViewPos;
uniform samplerCube skybox;
vec3 calculateDirectionalLight(DirectionalLight light, Material material, vec2 texCoord, vec3 normal, vec3 viewDirection);
vec3 calculatePointLight(PointLight light, Material material, vec2 texCoord, vec3 normal, vec3 fragPos, vec3 viewDirection);
vec3 calculateSpotLight(SpotLight light, Material material, vec2 texCoord, vec3 normal, vec3 fragPos, vec3 viewDirection);
void main()
{
vec3 lightColor = vec3(0.0);
vec3 normal = normalize(fs_in.v_Normal);
vec3 viewDirection = normalize(u_ViewPos - fs_in.v_FragPos);
lightColor += calculateDirectionalLight(u_DirectionalLight, u_Material, fs_in.v_TexCoord, normal, viewDirection);
for (int i = 0; i < NR_POINT_LIGHTS; i++)
lightColor += calculatePointLight(u_PointLights[i], u_Material, fs_in.v_TexCoord, normal, fs_in.v_FragPos, viewDirection);
lightColor += calculateSpotLight(u_SpotLight, u_Material, fs_in.v_TexCoord, normal, fs_in.v_FragPos, viewDirection);
lightColor += texture(u_Material.ambient, fs_in.v_TexCoord).rgb * texture(skybox, reflect(-viewDirection, normal)).rgb;
color = vec4(lightColor, 1.0);
}
vec3 calculateDirectionalLight(DirectionalLight light, Material material, vec2 texCoord, vec3 normal, vec3 viewDirection)
{
vec3 ambient = light.ambient * vec3(texture(material.diffuse, texCoord));
vec3 lightDirection = normalize(-light.direction);
vec3 diffuse = light.diffuse * vec3(texture(material.diffuse, texCoord)) * max(dot(lightDirection, normal), 0.0);
vec3 reflectDirection = reflect(-lightDirection, normal);
vec3 specular = light.specular * vec3(texture(material.specular, texCoord)) * pow(max(dot(viewDirection, reflectDirection), 0.0), material.shininess);
// vec3 emissive = texture(material.emissive, texCoord).rgb;
return vec3(ambient + diffuse + specular);
}
vec3 calculatePointLight(PointLight light, Material material, vec2 texCoord, vec3 normal, vec3 fragPos, vec3 viewDirection)
{
vec3 ambient = light.ambient * vec3(texture(material.diffuse, texCoord));
vec3 lightDirection = normalize(light.position - fragPos);
vec3 diffuse = light.diffuse * vec3(texture(material.diffuse, texCoord)) * max(dot(lightDirection, normal), 0.0);
vec3 reflectDirection = reflect(-lightDirection, normal);
vec3 specular = light.specular * vec3(texture(material.specular, texCoord)) * pow(max(dot(viewDirection, reflectDirection), 0.0), material.shininess);
float distance = length(light.position - fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance +
light.quadratic * (distance * distance));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
// vec3 emissive = texture(material.emissive, texCoord).rgb;
return vec3(ambient + diffuse + specular);
}
vec3 calculateSpotLight(SpotLight light, Material material, vec2 texCoord, vec3 normal, vec3 fragPos, vec3 viewDirection)
{
vec3 lightDirection = normalize(light.position - fragPos);
float theta = dot(lightDirection, normalize(-light.direction));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0, 1.0);
vec3 ambient = light.ambient * vec3(texture(material.diffuse, texCoord));
vec3 diffuse = light.diffuse * vec3(texture(material.diffuse, texCoord)) * max(dot(lightDirection, normal), 0.0);
vec3 reflectDirection = reflect(-lightDirection, normal);
vec3 specular = light.specular * vec3(texture(material.specular, texCoord)) * pow(max(dot(viewDirection, reflectDirection), 0.0), material.shininess);
// vec3 emissive = texture(material.emissive, texCoord).rgb;
float distance = length(light.position - fragPos);
float attenuation = 1.0 / (light.constant + light.linear * distance +
light.quadratic * (distance * distance));
ambient *= intensity * attenuation;
diffuse *= intensity * attenuation;
specular *= intensity * attenuation;
return vec3(ambient + diffuse + specular);
}
Before adding the geometry shader and passing the interpolated vertex attributes through it:
After passing varying attributes through the geometry shader:
Any ideas what I'm missing?
gl_Position = explode(gl_in[2].gl_Position, normal);
gs_out.v_Normal = gs_in[1].v_Normal;
gs_out.v_TexCoord = gs_in[1].v_TexCoord;
Looks like a copy-paste error; you probably meant to put 2 here instead of 1.
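With the index fixed, the third emitted vertex would read:
gl_Position = explode(gl_in[2].gl_Position, normal);
gs_out.v_Normal = gs_in[2].v_Normal;
gs_out.v_TexCoord = gs_in[2].v_TexCoord;
gs_out.v_FragPos = vec3(explode(vec4(gs_in[2].v_FragPos, 1.0), normal));
EmitVertex();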
I am making a relatively simple renderer for a physics engine (something similar to this). I am just learning OpenGL and have been following this tutorial. I want my renderer to handle a small number of lights chosen from these types: directional, point, spotlight, and area light. I also want simple shadows using shadow maps. So, for example, a scene might contain two spotlights, or one directional light, or one point light and one spotlight, etc. Currently I have one larger shader that handles all the lights together, but now that I am experimenting with shadow maps it seems like it would be better (from a modular design perspective) to have a different shader for each light, or at least for each light type. I am wondering whether this is a reasonable idea from an efficiency perspective. To make this more concrete, my current vertex shader looks like:
#version 130
in vec3 position;
in vec3 normal;
in vec2 atexture;
out vec3 FragPos;
out vec3 Normal;
out vec2 TexCoord;
out vec4 FragPosLightSpace;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform mat4 lightView;
uniform mat4 lightProjection;
void main()
{
gl_Position = projection * view * model * vec4(position.x, position.y, position.z, 1.0);
FragPos = vec3(model * vec4(position, 1.0));
Normal = normalize(normal);
TexCoord = atexture;
FragPosLightSpace = lightProjection * lightView * vec4(FragPos, 1.0f);
}
and fragment shader:
#version 130
struct Material
{
float shininess;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
struct DirLight
{
vec3 direction;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
struct PointLight
{
vec3 position;
float constant;
float linear;
float quadratic;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
struct SpotLight {
vec3 position;
vec3 direction;
float cutOff;
float outerCutOff;
float constant;
float linear;
float quadratic;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
struct AreaLight
{
vec3 position;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
out vec4 FragColor;
in vec3 FragPos;
in vec3 Normal;
in vec2 TexCoord;
in vec4 FragPosLightSpace;
uniform Material material;
uniform DirLight dirLight;
uniform PointLight pointLight;
uniform SpotLight spotLight;
uniform AreaLight areaLight;
uniform vec3 cameraPos;
uniform sampler2D texture1;
uniform sampler2D shadowMap;
float CalcShadow(vec4 FragPosLightSpace);
vec3 CalcDirLight(Material material, DirLight light, vec3 normal, vec3 viewDir);
vec3 CalcPointLight(Material material, PointLight light, vec3 normal, vec3 fragPos, vec3 viewDir);
vec3 CalcSpotLight(Material material, SpotLight light, vec3 normal, vec3 fragPos, vec3 viewDir);
vec3 CalcAreaLight(Material material, AreaLight light);
void main(void)
{
vec3 viewDir = normalize(cameraPos - FragPos);
vec3 finalLight = vec3(0.0f, 0.0f, 0.0f);
finalLight += CalcDirLight(material, dirLight, Normal, viewDir);
finalLight += CalcPointLight(material, pointLight, Normal, FragPos, viewDir);
finalLight += CalcSpotLight(material, spotLight, Normal, FragPos, viewDir);
finalLight += CalcAreaLight(material, areaLight);
FragColor = texture2D(texture1, TexCoord) * vec4(finalLight, 1.0f);
}
float CalcShadow(vec4 fragPosLightSpace)
{
// only actually needed when using perspective projection for the light
vec3 projCoords = fragPosLightSpace.xyz / fragPosLightSpace.w;
// projCoords is in [-1,1] range. Convert it to [0,1] range.
projCoords = projCoords * 0.5 + 0.5;
float closestDepth = texture(shadowMap, projCoords.xy).r;
float currentDepth = projCoords.z;
float bias = 0.005f;
float shadow = currentDepth - bias > closestDepth ? 1.0 : 0.0;
return shadow;
}
vec3 CalcDirLight(Material material, DirLight light, vec3 normal, vec3 viewDir)
{
vec3 lightDir = normalize(-light.direction);
vec3 reflectDir = reflect(-lightDir, normal);
float ambientStrength = 1.0f;
float diffuseStrength = max(dot(normal, lightDir), 0.0);
float specularStrength = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
float shadow = CalcShadow(FragPosLightSpace);
vec3 ambient = light.ambient * material.ambient * ambientStrength;
vec3 diffuse = (1.0f - shadow) * light.diffuse * material.diffuse * diffuseStrength;
vec3 specular = (1.0f - shadow) * light.specular * material.specular * specularStrength;
return (ambient + diffuse + specular);
}
vec3 CalcPointLight(Material material, PointLight light, vec3 normal, vec3 fragPos, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - fragPos);
vec3 reflectDir = reflect(-lightDir, normal);
float ambientStrength = 1.0f;
float diffuseStrength = max(dot(normal, lightDir), 0.0);
float specularStrength = pow(max(dot(viewDir, reflectDir), 0.0f), material.shininess);
float attenuation = 1.0f / (1.0f + 0.01f*pow(length(light.position - fragPos), 2));
vec3 ambient = light.ambient * material.ambient * ambientStrength;
vec3 diffuse = light.diffuse * material.diffuse * diffuseStrength;
vec3 specular = light.specular * material.specular * specularStrength;
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return vec3(ambient + diffuse + specular);
}
vec3 CalcSpotLight(Material material, SpotLight light, vec3 normal, vec3 fragPos, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - fragPos);
vec3 reflectDir = reflect(-lightDir, normal);
float ambientStrength = 0.05f;
float diffuseStrength = max(dot(normal, lightDir), 0.0);
float specularStrength = pow(max(dot(viewDir, reflectDir), 0.0f), material.shininess);
float attenuation = 1.0f / (1.0f + 0.01f*pow(length(light.position - fragPos), 2));
float theta = dot(lightDir, normalize(-light.direction));
float epsilon = light.cutOff - light.outerCutOff;
float intensity = clamp((theta - light.outerCutOff) / epsilon, 0.0f, 1.0f);
vec3 ambient = light.ambient * material.ambient * ambientStrength;
vec3 diffuse = light.diffuse * material.diffuse * diffuseStrength;
vec3 specular = light.specular * material.specular * specularStrength;
ambient *= attenuation * intensity;
diffuse *= attenuation * intensity;
specular *= attenuation * intensity;
return vec3(ambient + diffuse + specular);
}
vec3 CalcAreaLight(Material material, AreaLight light)
{
// return vec3(0.0f, 0.0f, 0.0f);
return vec3(2*material.ambient);
}
What I would like to do is separate each light type out into a different shader, so instead of having one "ubershader" I would have a directional light shader, a spotlight shader, etc. Is this a good idea? In particular, I am worried that switching shaders multiple times for each render call might be expensive.
Your question is quite broad and doesn't fit the SO format well. However, I will try to answer it, mostly because it is frequently asked by beginners to engine programming.
To manage different shader setups for lighting and shadowing, there are two standard practices:
"Uber-shader"
The idea behind this is that you have every possible case embedded into one shader. For example, you want to be able to render up to 4 light sources (I am talking about forward rendering here), so you insert a for loop with the maximum number of lights and then pass a uniform (the number of lights in the scene) to tell the loop at run time how many times to iterate. Then, if you enable the shadow pass, you also pass a uniform into the uber-shader to activate an "if" branch for shadow map sampling. As you can already see, this way is quite inefficient: you end up with complex branching all over your shader and have to submit multiple uniforms at run time to change the shader state. All of this impacts performance and usability. You can simplify it a little by using OpenGL 4.0 subroutines, but generally speaking - don't do it.
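A minimal fragment-shader sketch of that idea (uniform and varying names are mine, not taken from your code):
#version 330 core
#define MAX_LIGHTS 4
struct Light { vec3 position; vec3 color; };
uniform int u_NumLights;           // set by the application every frame
uniform bool u_UseShadows;         // toggles the shadow branch at run time
uniform Light u_Lights[MAX_LIGHTS];
uniform sampler2D u_ShadowMap;
in vec3 v_Normal;
in vec3 v_FragPos;
in vec4 v_FragPosLightSpace;
out vec4 FragColor;
void main()
{
    vec3 result = vec3(0.0);
    for (int i = 0; i < MAX_LIGHTS; i++)
    {
        if (i >= u_NumLights) break;                      // loop count driven by a uniform
        vec3 lightDir = normalize(u_Lights[i].position - v_FragPos);
        vec3 contribution = max(dot(normalize(v_Normal), lightDir), 0.0) * u_Lights[i].color;
        if (u_UseShadows)                                 // branch activated by a uniform
        {
            vec3 proj = v_FragPosLightSpace.xyz / v_FragPosLightSpace.w * 0.5 + 0.5;
            float shadow = proj.z - 0.005 > texture(u_ShadowMap, proj.xy).r ? 1.0 : 0.0;
            contribution *= (1.0 - shadow);
        }
        result += contribution;
    }
    FragColor = vec4(result, 1.0);
}
Every fragment pays for the loop and both branches even when only one light is active and shadows are off, which is exactly the inefficiency described above.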
Shader permutations
This is quite common in the industry, and while such a system is more complex to design and set up, it pays off in the long run. The idea is that you configure your shader code at run time based on the use-case scenario (or, if you have an offline shader compiler available, you can even do this at build time), so that in the end you get a shader string containing only the code for the specific rendering setup. For example, if your scene has 2 lights plus shadows, and the material of a renderable object uses a diffuse and a normal map, then you configure the shaders for that material to generate code that handles 2 lights, shadow mapping, and diffuse and normal map sampling. It would take too much time and space here to describe in detail how to design and code such a system, but generally speaking you write shader templates full of pre-processor flags for the different permutations. You inject the pre-processor defines for the specific permutation, then compile the shaders and link the shader program. In top-notch game engines like Unity3D and Unreal, all possible shader permutations are already generated in the editor at authoring time. If you roll your own engine, just compose the required permutation at runtime and hand it to the shader compiler. With long shader strings you will notice a slight freeze during online compilation, but if you cache and then reuse already compiled permutations of the shader programs, you are going to be fine.
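As a rough illustration, a permutation template might look like the sketch below (all names hypothetical). The application prepends the #version line plus the #define lines for the chosen permutation, e.g. "#define NUM_POINT_LIGHTS 2" and "#define USE_SHADOW_MAP", either by string concatenation or as an extra string passed to glShaderSource, and then compiles; code behind disabled flags never reaches the compiled program.
// prepended per permutation by the application:
//   #version 330 core
//   #define NUM_POINT_LIGHTS 2
//   #define USE_SHADOW_MAP
struct PointLight { vec3 position; vec3 color; };
uniform PointLight u_PointLights[NUM_POINT_LIGHTS];   // array size fixed per permutation
uniform sampler2D u_Diffuse;
#ifdef USE_SHADOW_MAP
uniform sampler2D u_ShadowMap;
in vec4 v_FragPosLightSpace;
#endif
in vec3 v_Normal;
in vec3 v_FragPos;
in vec2 v_TexCoord;
out vec4 FragColor;
void main()
{
    vec3 normal = normalize(v_Normal);
    vec3 result = vec3(0.0);
    for (int i = 0; i < NUM_POINT_LIGHTS; i++)        // bound is a compile-time constant
    {
        vec3 lightDir = normalize(u_PointLights[i].position - v_FragPos);
        result += max(dot(normal, lightDir), 0.0) * u_PointLights[i].color;
    }
#ifdef USE_SHADOW_MAP
    vec3 proj = v_FragPosLightSpace.xyz / v_FragPosLightSpace.w * 0.5 + 0.5;
    if (proj.z - 0.005 > texture(u_ShadowMap, proj.xy).r)
        result *= 0.2;                                 // compiled out entirely when shadows are off
#endif
    FragColor = texture(u_Diffuse, v_TexCoord) * vec4(result, 1.0);
}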
Bonus part
You can also do what you proposed - prebuild the different variations of the shaders - which is effectively my second approach. But your proposal is problematic, because wrapping the rendering logic for a single light into a separate program would mean, for a scene with 2 light sources:
1 - Render the object with the first light source.
2 - Render the object with the second light source.
3 - Compose the two results into the final image.
This already requires 3 render passes and takes you more in the direction of deferred shading, which is a quite advanced technique and not always what you need, unless your plan is to develop an engine that deals with huge amounts of geometry and light sources.
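If you do go down that road anyway, each pass typically runs a single-light shader like the sketch below (illustrative names), and the application accumulates the passes with additive blending, e.g. glBlendFunc(GL_ONE, GL_ONE) for the second and later passes:
#version 330 core
// One light per draw call; the application re-draws the geometry once per light
// and the blend state sums the results in the framebuffer.
struct Light { vec3 position; vec3 color; };
uniform Light u_Light;
uniform sampler2D u_Diffuse;
in vec3 v_Normal;
in vec3 v_FragPos;
in vec2 v_TexCoord;
out vec4 FragColor;
void main()
{
    vec3 lightDir = normalize(u_Light.position - v_FragPos);
    float diff = max(dot(normalize(v_Normal), lightDir), 0.0);
    FragColor = vec4(diff * u_Light.color * texture(u_Diffuse, v_TexCoord).rgb, 1.0);
}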
I have been following the www.learnopengl.com tutorial and tried to load models using the method described. It works until the very end, where it asks you to load the nanosuit with two point lights. I have copied the exact code from the tutorial (camera, mesh, model, and shader classes), but the issue is with the fragment shader. If I try a simple one like:
#version 330 core
in vec2 TexCoords;
out vec4 color;
uniform sampler2D texture_diffuse1;
void main()
{
color = vec4(texture(texture_diffuse1, TexCoords));
}
, the model is loaded and the textures too. But if I try the solution given for two point lights, the texture is black:
#version 330 core
struct Material {
sampler2D texture_diffuse1;
sampler2D texture_specular1;
float shininess;
};
/* Note: because we now use a material struct again you want to change your
mesh class to bind all the textures using material.texture_diffuseN instead of
texture_diffuseN. */
struct PointLight {
vec3 position;
float constant;
float linear;
float quadratic;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
#define NR_POINT_LIGHTS 2
in vec3 fragPosition;
in vec3 Normal;
in vec2 TexCoords;
out vec4 color;
uniform vec3 viewPos;
uniform PointLight pointLights[NR_POINT_LIGHTS];
uniform Material material;
// Function prototypes
vec3 CalcPointLight(PointLight light, Material mat, vec3 normal, vec3 fragPos, vec3 viewDir);
void main()
{
vec3 result;
vec3 viewDir = normalize(viewPos - fragPosition);
vec3 norm = normalize(Normal);
for(int i = 0; i < NR_POINT_LIGHTS; i++)
result += CalcPointLight(pointLights[i], material, norm, fragPosition, viewDir);
color = vec4(result, 1.0f);
}
// Calculates the color when using a point light.
vec3 CalcPointLight(PointLight light, Material mat, vec3 normal, vec3 fragPos, vec3 viewDir)
{
vec3 lightDir = normalize(light.position - fragPos);
// Diffuse shading
float diff = max(dot(normal, lightDir), 0.0);
// Specular shading
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), mat.shininess);
// Attenuation
float distance = length(light.position - fragPos);
float attenuation = 1.0f / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
// Combine results
vec3 ambient = light.ambient * vec3(texture(mat.texture_diffuse1, TexCoords));
vec3 diffuse = light.diffuse * diff * vec3(texture(mat.texture_diffuse1, TexCoords));
vec3 specular = light.specular * spec * vec3(texture(mat.texture_specular1, TexCoords));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient + diffuse + specular);
}
I also modified the mesh class, as described in the note in the fragment shader, from:
glUniform1i(glGetUniformLocation(shader.Program, (name + number).c_str()), i);
to:
glUniform1i(glGetUniformLocation(shader.Program, ("material." + name + number).c_str()), i);
Is anyone having the same issue?
It seems like GLSL does not like the function. If I take the CalcPointLight() code and place it directly in main(), it works!
#version 330 core
struct Material {
sampler2D texture_diffuse1;
sampler2D texture_specular1;
float shininess;
};
/* Note: because we now use a material struct again you want to change your
mesh class to bind all the textures using material.texture_diffuseN instead of
texture_diffuseN. */
struct PointLight {
vec3 position;
float constant;
float linear;
float quadratic;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
#define NR_POINT_LIGHTS 2
in vec3 fragPosition;
in vec3 Normal;
in vec2 TexCoords;
out vec4 color;
uniform vec3 viewPos;
uniform PointLight pointLights[NR_POINT_LIGHTS];
uniform Material material;
void main()
{
vec3 result;
vec3 viewDir = normalize(viewPos - fragPosition);
vec3 norm = normalize(Normal);
vec3 lightDir = normalize(pointLights[0].position - fragPosition);
// Diffuse shading
float diff = max(dot(norm, lightDir), 0.0);
// Specular shading
vec3 reflectDir = reflect(-lightDir, norm);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
// Attenuation
float distance = length(pointLights[0].position - fragPosition);
float attenuation = 1.0f / (pointLights[0].constant + pointLights[0].linear * distance + pointLights[0].quadratic * (distance * distance));
// Combine results
vec3 ambient = pointLights[0].ambient * vec3(texture(material.texture_diffuse1, TexCoords));
vec3 diffuse = pointLights[0].diffuse * diff * vec3(texture(material.texture_diffuse1, TexCoords));
vec3 specular = pointLights[0].specular * spec * vec3(texture(material.texture_specular1, TexCoords));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
result = ambient+diffuse+specular;
color = vec4(result, 1.0f);
}
Edit: Found the answer here: GLSL sampler2D in struct. You can't instantiate (copy) a struct that contains an opaque type such as sampler2D, which is what happens when the struct is passed as a function argument.
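Another workaround (a sketch, reusing the Material and PointLight declarations from the shader above) is to keep the structs as uniforms but pass the samplers themselves as function parameters, which is allowed for opaque types:
vec3 CalcPointLight(PointLight light, sampler2D diffuseMap, sampler2D specularMap, float shininess,
                    vec3 normal, vec3 fragPos, vec3 viewDir)
{
    vec3 lightDir = normalize(light.position - fragPos);
    float diff = max(dot(normal, lightDir), 0.0);
    vec3 reflectDir = reflect(-lightDir, normal);
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), shininess);
    float distance = length(light.position - fragPos);
    float attenuation = 1.0 / (light.constant + light.linear * distance + light.quadratic * (distance * distance));
    vec3 ambient = light.ambient * vec3(texture(diffuseMap, TexCoords));
    vec3 diffuse = light.diffuse * diff * vec3(texture(diffuseMap, TexCoords));
    vec3 specular = light.specular * spec * vec3(texture(specularMap, TexCoords));
    return (ambient + diffuse + specular) * attenuation;
}
// called from main() as:
// result += CalcPointLight(pointLights[i], material.texture_diffuse1, material.texture_specular1,
//                          material.shininess, norm, fragPosition, viewDir);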
Edit1: You can actually use those variables in a function without passing them as arguments, since they are defined globally:
#version 330 core
struct Material {
sampler2D texture_diffuse1;
sampler2D texture_specular1;
float shininess;
};
/* Note: because we now use a material struct again you want to change your
mesh class to bind all the textures using material.texture_diffuseN instead of
texture_diffuseN. */
struct PointLight {
vec3 position;
float constant;
float linear;
float quadratic;
vec3 ambient;
vec3 diffuse;
vec3 specular;
};
#define NR_POINT_LIGHTS 2
in vec3 fragPosition;
in vec3 Normal;
in vec2 TexCoords;
out vec4 color;
uniform vec3 viewPos;
uniform PointLight pointLights[NR_POINT_LIGHTS];
uniform Material material;
vec3 CalcLights(int i);
void main()
{
vec3 result = vec3(0.0); // initialize the accumulator before the loop
for(int i=0; i<NR_POINT_LIGHTS; i++)
result += CalcLights(i);
color = vec4(result, 1.0f);
}
vec3 CalcLights(int i) //viewPos fragPosition pointLights[] Normal material TexCoords
{
vec3 viewDir = normalize(viewPos - fragPosition);
vec3 norm = normalize(Normal);
vec3 lightDir = normalize(pointLights[i].position - fragPosition);
// Diffuse shading
float diff = max(dot(norm, lightDir), 0.0);
// Specular shading
vec3 reflectDir = reflect(-lightDir, norm);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
// Attenuation
float distance = length(pointLights[i].position - fragPosition);
float attenuation = 1.0f / (pointLights[i].constant + pointLights[i].linear * distance + pointLights[i].quadratic * (distance * distance));
// Combine results
vec3 ambient = pointLights[i].ambient * vec3(texture(material.texture_diffuse1, TexCoords));
vec3 diffuse = pointLights[i].diffuse * diff * vec3(texture(material.texture_diffuse1, TexCoords));
vec3 specular = pointLights[i].specular * spec * vec3(texture(material.texture_specular1, TexCoords));
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient+diffuse+specular);
}
In my lighting scene, for some reason the ambient lighting isn't working at all. The whole model is the same brightness, no matter which way it is facing. I tried getting rid of the attenuation but it still has the same results. Along with that, the specular lighting is always shining, no matter where the camera is. It is supposed to shine based on player position.
Here is a screenshot of the ambient problem: Imgur.com
As you can see, the part of the sphere that is facing away from the light (located at [0.0,4.0,0.0]) is the same color as the part facing the light. The ambient factor is supposed to be 0.2 of the fragment color.
Vertex shader source:
layout(location = 0) in vec3 positions;
layout(location = 1) in vec2 texCoords;
layout(location = 2) in vec3 normals;
out vec3 new_normal;
out vec3 worldPos_out;
out vec2 pass_texCoords;
struct Matrices {
mat4 projection;
mat4 worldMatrix;
mat4 modelMatrix;
mat3 normalMatrix;
};
uniform Matrices mat;
void main(void)
{
pass_texCoords = texCoords;
vec4 newPosition = vec4(positions, 1);
vec4 worldPos = (mat.modelMatrix * newPosition);
mat4 Camera = mat.projection * mat.worldMatrix;
gl_Position = (Camera * worldPos);
new_normal = mat.normalMatrix * normals;
worldPos_out = worldPos.xyz;
}
Fragment shader source:
in vec3 new_normal;
in vec3 worldPos_out;
in vec2 pass_texCoords;
out vec4 outColor;
uniform vec3 viewPos;
#define MAX_LIGHTS 50
struct Material {
sampler2D diffuseMap;
sampler2D specularMap;
vec3 specular;
float shininess;
};
uniform Material material;
struct Light {
vec3 position;
vec3 color;
vec3 ambient;
vec3 diffuse;
vec3 specular;
float radius;
};
uniform Light Lights[MAX_LIGHTS];
uniform int numLights;
struct Math {
float constant;
float linear;
float quadratic;
} math;
vec3 applyPointLight(Light light, vec3 normal, vec3 fragPos, vec3 viewDir, vec3 surfaceColor, vec3 surfaceSpecular) {
vec3 lightDir = normalize(light.position - fragPos);
//Diffuse shading
float diff = max(dot(normal, lightDir), 0.0);
//Specular shading
vec3 reflectDir = reflect(-lightDir, normal);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
//Attenuation
float distance = length(light.position - fragPos);
float attenuation = 5.0 / (math.constant + math.linear * distance +
math.quadratic * (distance * distance));
vec3 ambient = light.ambient * surfaceColor;
vec3 diffuse = light.diffuse * surfaceColor * light.color;
vec3 specular = light.specular * surfaceSpecular * light.color;
ambient *= attenuation;
diffuse *= attenuation;
specular *= attenuation;
return (ambient + diffuse + specular);
}
void main(void) {
vec3 surfaceColor = vec3(texture(material.diffuseMap, pass_texCoords));
vec3 surfaceSpecular = vec3(texture(material.specularMap, pass_texCoords));
vec3 unitNormal = normalize(new_normal);
vec3 viewDir = normalize(viewPos - worldPos_out);
math.constant = 1.0;
math.linear = 0.09;
math.quadratic = 0.032;
vec3 linearColor;
for(int i = 0; i < numLights; i++)
linearColor += applyPointLight(Lights[i], unitNormal, worldPos_out, viewDir, surfaceColor, surfaceSpecular);
float gamma = 2.2;
vec3 fragColor;
fragColor.rgb = pow(linearColor.rgb, vec3(1.0/gamma));
outColor = vec4(linearColor, 1.0);
}
In your applyPointLight function, you're not using the diff and spec variables, which are presumably the light-dependent changes to diffuse and specular. See if the following works:
vec3 diffuse = light.diffuse * surfaceColor * light.color * diff;
vec3 specular = light.specular * surfaceSpecular * light.color * spec;
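For completeness, the full applyPointLight with those two factors applied would look like this (everything else unchanged from the shader in the question):
vec3 applyPointLight(Light light, vec3 normal, vec3 fragPos, vec3 viewDir, vec3 surfaceColor, vec3 surfaceSpecular) {
    vec3 lightDir = normalize(light.position - fragPos);
    //Diffuse shading
    float diff = max(dot(normal, lightDir), 0.0);
    //Specular shading
    vec3 reflectDir = reflect(-lightDir, normal);
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), material.shininess);
    //Attenuation
    float distance = length(light.position - fragPos);
    float attenuation = 5.0 / (math.constant + math.linear * distance +
        math.quadratic * (distance * distance));
    vec3 ambient = light.ambient * surfaceColor;
    vec3 diffuse = light.diffuse * surfaceColor * light.color * diff;      // now scaled by the angle term
    vec3 specular = light.specular * surfaceSpecular * light.color * spec; // now scaled by the highlight term
    ambient *= attenuation;
    diffuse *= attenuation;
    specular *= attenuation;
    return (ambient + diffuse + specular);
}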