I have a problem with multiple shaders on a single object.
Here is my render code:
#include "MeshRenderer.h"
ForwardAmbient* shader1;
ForwardDirectional* shader2;
MeshRenderer::MeshRenderer(Obj& obj) :
meshObject(obj)
{
shader1 = new ForwardAmbient(vec3(1, 1, 1));
shader2 = new ForwardDirectional(vec3(1, 0, 0), vec3(1, 1, 1));
}
MeshRenderer::~MeshRenderer()
{
}
void MeshRenderer::Render(RenderingCore* rc)
{
//for (Shader* shader : meshObject.shaders)
//{
//}
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
/*
meshObject.shader->Bind();
meshObject.shader->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
*/
}
ambient light.vs:
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
}
ambient light.fs:
#version 120
varying vec2 texCoord0;
uniform vec3 ambientLight;
uniform float alpha;
uniform sampler2D sampler;
void main()
{
gl_FragColor = texture2D(sampler, texCoord0.xy) * vec4(ambientLight, alpha);
}
directional light.vs
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
normal0 = (transform * vec4(normal, 0)).xyz;
worldPos0 = (transform * vec4(position, 1)).xyz;
}
directional light.fs
#version 120
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform vec3 color;
uniform float alpha;
uniform vec3 direction;
uniform float specularIntensity;
uniform float specularPower;
uniform vec3 eyePosition;
uniform sampler2D sampler;
vec4 calcLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
    float diffuseFactor = dot(normal, -direction);
    vec4 diffuseColor = vec4(0,0,0,0);
    vec4 specularColor = vec4(0,0,0,0);
    if(diffuseFactor > 0)
    {
        diffuseColor = vec4(color, 1.0) * diffuseFactor;
        vec3 directionToEye = normalize(eyePosition - worldPos0);
        vec3 reflectDirection = normalize(reflect(direction, normal));
        float specularFactor = dot(directionToEye, reflectDirection);
        specularFactor = pow(specularFactor, specularPower);
        if(specularFactor > 0)
        {
            specularColor = vec4(color, 1.0) * specularIntensity * specularFactor;
        }
    }
    return diffuseColor + specularColor;
}
vec4 calcDirectionalLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
    return calcLight(color, alpha, -direction, normal);
}
void main()
{
    gl_FragColor = texture2D(sampler, texCoord0.xy) * calcDirectionalLight(color, 1, direction, normalize(normal0));
}
Here is the result:
http://imgur.com/Bawny2P
Only the ambient light is rendered, the directional light is not.
Your problem is this:
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
OpenGL doesn't know what an "object" is. It just draws points, lines and triangles, one at a time. To sort out depth overlap, the depth buffer method is used. When you issue exactly the same draw call (meshObject.mesh->Render) with the same vertex setup and depth testing enabled, one of the two draw calls will simply win over the other.
Also, and more importantly, draw calls don't "stack". You simply cannot combine shaders by drawing the same thing multiple times; it may sort of work for purely additive passes (with additive blending and an equal depth test), but that's barking up the wrong tree: instead of saving work, you're duplicating it.
What you should do instead is merge the two shaders into a single one and draw the geometry only once, with the merged shader.
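For illustration, a merged fragment shader could look like the sketch below. It is only a sketch, not a drop-in for your shader classes: the uniform and varying names are copied from the two shaders above, and it is meant to be paired with the "directional light.vs" vertex shader, which already outputs all three varyings.
#version 120
// Sketch: ambient and directional term evaluated in a single pass.
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform vec3 ambientLight;
uniform vec3 color;
uniform float alpha;
uniform vec3 direction;
uniform float specularIntensity;
uniform float specularPower;
uniform vec3 eyePosition;
uniform sampler2D sampler;
vec4 calcLight(vec3 lightColor, vec3 lightDir, vec3 normal)
{
    float diffuseFactor = dot(normal, -lightDir);
    vec4 diffuseColor = vec4(0.0);
    vec4 specularColor = vec4(0.0);
    if(diffuseFactor > 0.0)
    {
        diffuseColor = vec4(lightColor, 1.0) * diffuseFactor;
        vec3 directionToEye = normalize(eyePosition - worldPos0);
        vec3 reflectDirection = normalize(reflect(lightDir, normal));
        float specularFactor = pow(max(dot(directionToEye, reflectDirection), 0.0), specularPower);
        specularColor = vec4(lightColor, 1.0) * specularIntensity * specularFactor;
    }
    return diffuseColor + specularColor;
}
void main()
{
    // One texture fetch, lit by the sum of the ambient and the directional term.
    vec4 totalLight = vec4(ambientLight, alpha)
                    + calcLight(color, -direction, normalize(normal0));
    gl_FragColor = texture2D(sampler, texCoord0.xy) * totalLight;
}
On the C++ side, only one shader would then be bound and updated before meshObject.mesh->Render().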
Related
I'm drawing some static geometry (a sphere, a cube, etc.) together with some dynamic geometry (a rotating torus).
I can see that there is a problem: the specular lighting on the torus is static, and the torus is rendered dark as the rotation angle changes...
I'm targeting OpenGL 2.1 (desktop), OpenGL ES2 (mobile) and WebGL1 (web). Here is a gist with the full code. There is also a WebGL demo.
The framework used is chronotext-cross. It provides a level of abstraction above GL and should be straightforward to understand. In any case, I can provide pointers, as I am the author.
The C++ code, abridged:
void Sketch::setup()
{
Box()
.setFrontFace(CCW)
.setColor(0.75f, 0.75f, 0.75f, 1)
.setSize(300, 5, 300)
.append(geometryBatch, Matrix().translate(-150, -5, -150));
Sphere()
.setFrontFace(CCW)
.setColor(0.25f, 1.0f, 0.0f, 1)
.setSectorCount(60)
.setStackCount(30)
.setRadius(40)
.append(geometryBatch, Matrix().translate(-75, -40, 100));
Torus()
.setFrontFace(CCW)
.setSliceCount(20)
.setLoopCount(60)
.setInnerRadius(12)
.setOuterRadius(48)
.append(torusBatch, Matrix());
}
void Sketch::resize()
{
camera
.setFov(45)
.setClip(0.1f, 1000.0f)
.setWindowSize(windowInfo.size);
}
void Sketch::draw()
{
camera.getViewMatrix()
.setIdentity()
.scale(1, -1, 1)
.translate(0, 0, -400)
.rotateX(-30 * D2R)
.rotateY(15 * D2R);
State state;
state
.setShader(shader)
.setShaderMatrix<MODEL>(Matrix())
.setShaderMatrix<VIEW>(camera.getViewMatrix())
.setShaderMatrix<PROJECTION>(camera.getProjectionMatrix())
.setShaderMatrix<NORMAL>(camera.getNormalMatrix())
.setShaderUniform("u_eye_position", camera.getEyePosition())
.setShaderUniform("u_light_position", camera.getEyePosition())
.setShaderUniform("u_shininess", 50.0f)
.apply();
geometryBatch.flush();
Matrix modelMatrix;
modelMatrix
.translate(75, -60, 100)
.rotateY(clock()->getTime());
state
.setShaderMatrix<MODEL>(modelMatrix)
.apply();
torusBatch.flush();
}
The vertex shader:
attribute vec4 a_position;
attribute vec3 a_normal;
attribute vec4 a_color;
attribute vec2 a_coord;
uniform mat4 u_model_matrix;
uniform mat4 u_view_matrix;
uniform mat4 u_projection_matrix;
uniform mat3 u_normal_matrix;
uniform vec3 u_eye_position;
uniform vec3 u_light_position;
varying vec3 v_normal;
varying vec4 v_color;
varying vec2 v_coord;
varying vec3 v_surface_to_light;
varying vec3 v_surface_to_view;
void main() {
v_normal = u_normal_matrix * a_normal;
v_color = a_color;
v_coord = a_coord;
v_surface_to_light = (u_view_matrix * (vec4(u_light_position, 1.0) - a_position)).xyz;
v_surface_to_view = (u_view_matrix * (vec4(u_eye_position, 1.0) - a_position)).xyz;
gl_Position = u_projection_matrix * u_view_matrix * u_model_matrix * a_position;
}
The fragment shader:
#ifdef GL_ES
precision highp float;
#endif
uniform sampler2D u_sampler;
uniform float u_shininess;
varying vec3 v_normal;
varying vec4 v_color;
varying vec2 v_coord;
varying vec3 v_surface_to_light;
varying vec3 v_surface_to_view;
void main() {
vec3 normal = normalize(v_normal);
vec3 surfaceToLightDirection = normalize(v_surface_to_light);
vec3 surfaceToViewDirection = normalize(v_surface_to_view);
vec3 halfVector = normalize(surfaceToLightDirection + surfaceToViewDirection);
float specular = 0.0;
float light = dot(normal, surfaceToLightDirection);
if (light > 0.0) {
specular = pow(dot(normal, halfVector), u_shininess);
}
vec4 color = v_color * texture2D(u_sampler, v_coord);
gl_FragColor = vec4(color.rgb * light + specular, 1.0);
}
I found the solution: passing a new normal matrix (extracted from the model-view matrix) to the shader when drawing the dynamic mesh.
Matrix modelMatrix;
modelMatrix
.translate(75, -60, 100)
.rotateY(clock()->getTime());
Matrix modelViewMatrix = modelMatrix * camera.getViewMatrix();
state
.setShaderMatrix<MODEL>(modelMatrix)
.setShaderMatrix<NORMAL>(modelViewMatrix.getNormalMatrix())
.apply();
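For context, the normal matrix is the transpose of the inverse of the upper-left 3x3 of the model-view matrix; for pure rotation and translation, mat3(modelView) already works. GLSL ES 1.00 and GLSL 1.20 (the profiles targeted here) have no inverse() built-in, which is why it is computed on the CPU above. Purely as a sketch, on desktop GLSL 1.40+ the same derivation could be done in the vertex shader; the names below are illustrative, not the framework's:
#version 140
// Sketch only: per-object normal matrix derived in the shader.
// transpose(inverse(...)) also copes with non-uniform scaling.
in vec4 a_position;
in vec3 a_normal;
uniform mat4 u_model_matrix;
uniform mat4 u_view_matrix;
uniform mat4 u_projection_matrix;
out vec3 v_normal;
void main() {
    mat4 modelView = u_view_matrix * u_model_matrix;
    v_normal = transpose(inverse(mat3(modelView))) * a_normal;
    gl_Position = u_projection_matrix * modelView * a_position;
}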
struct quadricObj {
    GLUquadricObj* obj;
    GLenum drawmode{ GLU_FILL };
    GLdouble radius{1.0};
    GLint slices{20};
    GLint stacks{20};
    glm::vec3 col{ 1.0,0.0,0.0 };
    std::vector<glm::mat4> M;

    glm::mat4 world_M() {
        glm::mat4 WM(1.0f);
        std::for_each(this->M.begin(), this->M.end(), [&WM](glm::mat4& m) { WM *= m; });
        // M0*M1*M2 TRS
        return WM;
    }

    GLvoid draw() {
        gluQuadricDrawStyle(this->obj, this->drawmode);
        glUniformMatrix4fv(worldLoc, 1, GL_FALSE, glm::value_ptr(this->world_M()));
        glColor4f(this->col.r, this->col.g, this->col.b, 1.0f); // doesn't work.
        gluSphere(this->obj, this->radius, this->slices, this->stacks);
    }
};
This is the struct I use for quadric objects. I think glColor4f should work, but it doesn't.
The quadrics stay black.
How can I color quadrics in GL?
#version 330
in vec3 v_normal;
in vec2 v_texCoord;
in vec3 v_color;
in vec3 fragPos;
out vec4 gl_FragColor;
uniform vec3 lightColor;
uniform vec3 lightPos;
uniform vec3 viewPos;
uniform float ambientLight;
uniform int shine;
void main(void)
{
vec3 ambient = clamp(ambientLight*lightColor,0.0,1.0);
vec3 normalVector = normalize(v_normal);
vec3 lightDir = normalize(lightPos-fragPos);
float diffuseLight = max(dot(normalVector,lightDir),0.0);
vec3 diffuse = clamp(diffuseLight * lightColor,0.0,1.0);
vec3 viewDir = normalize(viewPos-fragPos);
vec3 reflectDir = reflect(-lightDir,normalVector);
float specularLight = max(dot(viewDir,reflectDir),0.0);
specularLight = pow(specularLight,shine);
vec3 specular = clamp(specularLight*lightColor,0.0,1.0);
vec3 result = (ambient+diffuse)*v_color+specular*(0.8,0.8,0.8);
gl_FragColor = vec4(result,1.0);
}
I edited my fragment shader so it contains a Phong model. Can this work with gluSphere too, or not? I'm also using a vertex shader, which has in variables for position, color, normal and texture coordinates, and matching outputs.
gluSphere cannot be used together with user-defined vertex shader input variables (attribute). You are limited to a GLSL 1.20 vertex shader and the Vertex Shader Built-In Attributes (gl_Vertex, gl_Normal, gl_Color, gl_MultiTexCoord0). You can combine a GLSL 1.20 vertex shader with your fragment shader.
A suitable vertex shader can look like this:
#version 120
varying vec3 v_normal;
varying vec2 v_texCoord;
varying vec3 v_color;
varying vec3 fragPos;
uniform mat4 worldMatrix; // the matrix with the location worldLoc
void main()
{
v_color = gl_Color.rgb;
v_texCoord = gl_MultiTexCoord0.st;
v_normal = mat3(worldMatrix) * gl_Normal;
fragPos = (worldMatrix * gl_Vertex).xyz;
gl_Position = gl_ProjectionMatrix * worldMatrix * gl_Vertex;
}
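If you prefer to keep both stages at the same version, the question's Phong fragment shader can also be ported to GLSL 1.20. The sketch below keeps the uniform names from the question; shine is declared as float here, and the specular tint is written as vec3(0.8) rather than (0.8,0.8,0.8):
#version 120
// Sketch only: GLSL 1.20 port of the Phong fragment shader above.
varying vec3 v_normal;
varying vec2 v_texCoord;
varying vec3 v_color;
varying vec3 fragPos;
uniform vec3 lightColor;
uniform vec3 lightPos;
uniform vec3 viewPos;
uniform float ambientLight;
uniform float shine; // declared float here; the original used int
void main(void)
{
    vec3 ambient = clamp(ambientLight * lightColor, 0.0, 1.0);
    vec3 normalVector = normalize(v_normal);
    vec3 lightDir = normalize(lightPos - fragPos);
    float diffuseLight = max(dot(normalVector, lightDir), 0.0);
    vec3 diffuse = clamp(diffuseLight * lightColor, 0.0, 1.0);
    vec3 viewDir = normalize(viewPos - fragPos);
    vec3 reflectDir = reflect(-lightDir, normalVector);
    float specularLight = max(dot(viewDir, reflectDir), 0.0);
    specularLight = pow(specularLight, shine);
    vec3 specular = clamp(specularLight * lightColor, 0.0, 1.0);
    vec3 result = (ambient + diffuse) * v_color + specular * vec3(0.8);
    gl_FragColor = vec4(result, 1.0);
}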
Sorry, I am new to OpenGL ES and Processing.
The Processing code and shaders below output only the background.
PShader Gouraud,Phong;
rocket = loadShape("rocket.obj");
rocket.setFill(color(800, 0, 0));
Gouraud= loadShader("gouraudfragment.glsl","gouraudvertex.glsl");
Phong= loadShader("phongfragment.glsl","phongvertex.glsl");
background(0);
pushMatrix();
shader(Gouraud);
translate(130,height/2.0);
rotateY(rc);
rotateX(0.4);
noStroke();
fill(#800080);
box(100);
rc+=(0.02+speedCube);
rc*=dirCube;
popMatrix();
pushMatrix();
shader(Gouraud);
translate(width/2, height/2 + 100, -200);
rotateZ(PI);
rotateY(rr);
shape(rocket,100,100);
rr +=( 0.02+speedRocket);
rr*=dirRocket;
popMatrix();
vertex shader
varying vec3 N;
varying vec3 v;
varying vec4 diffuse;
varying vec4 spec;
attribute vec4 position;
attribute vec3 normal;
uniform mat4 modelview;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform vec4 lightPosition;
uniform vec3 lightAmbient;
uniform vec3 lightDiffuse;
uniform vec3 lightSpecular;
uniform float SpecularPower;
void main()
{
vec4 diffuse;
vec4 spec;
vec4 ambient;
v = vec3(modelview * position);
N = normalize(normalMatrix * normal);
gl_Position = projectionMatrix * position;
vec3 L = normalize(lightPosition.xyz - v);
vec3 E = normalize(-v);
vec3 R = normalize(reflect(-L,N));
ambient = vec4(lightAmbient,100.0);
diffuse = vec4(clamp( lightDiffuse * max(dot(N,L), 0.0) , 0.0, 1.0 ) ,100.0);
spec = vec4(clamp (lightSpecular * pow(max(dot(R,E),0.0),0.3*SpecularPower) , 0.0, 1.0 ),100.0);
color = ambient + diffuse + spec;
}
fragment shader
void main()
{
gl_FragColor = color;
}
Please help!
Before applying Gouraud shading
After applying Gouraud shading
Processing loads the obj, draws a cube and applies the Gouraud shader, but after that only the background is shown; the loaded obj and the cube are gone. Nothing is shown!
The shader doesn't even compile and link. The vertex shader writes one varying output (color), so the fragment shader needs the corresponding input:
varying vec4 color;
When you set the clip space position, the vertex coordinate has to be transformed by the model view matrix and the projection matrix:
gl_Position = projectionMatrix * modelview * position;
The type specifications of v and N are missing, and the types of ambient, diffuse and spec are vec4 rather than vec3.
Vertex shader:
attribute vec4 position;
attribute vec3 normal;
varying vec4 color;
uniform mat4 modelview;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform vec4 lightPosition;
uniform vec3 lightAmbient;
uniform vec3 lightDiffuse;
uniform vec3 lightSpecular;
uniform float SpecularPower;
void main()
{
vec3 v = vec3(modelview * position);
vec3 N = normalize(normalMatrix * normal);
gl_Position = projectionMatrix * modelview * position;
vec3 L = normalize(lightPosition.xyz - v);
vec3 E = normalize(-v);
vec3 R = normalize(reflect(-L,N));
vec4 ambient = vec4(lightAmbient,100.0);
vec4 diffuse = vec4(clamp( lightDiffuse * max(dot(N,L), 0.0) , 0.0, 1.0 ) ,100.0);
vec4 spec = vec4(clamp (lightSpecular * pow(max(dot(R,E),0.0),0.3*SpecularPower) , 0.0, 1.0 ),100.0);
color = ambient + diffuse + spec;
}
Fragment shader:
varying vec4 color;
void main()
{
gl_FragColor = color;
}
Of course, you have to set at least an ambient light source with ambientLight().
You can use directionalLight(), pointLight() or spotLight(), too.
But note that your shader can handle only 1 light source. More than 1 light source would cause:
OpenGL error 1282 at top endDraw(): invalid operation
If you want to use more than 1 light source, then you have to use uniform arrays in the vertex shader for lightPosition, lightAmbient, lightDiffuse, and lightSpecular. See Types of shaders in Processing (https://processing.org/tutorials/pshader/).
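For illustration only, the corrected vertex shader above could be extended to several lights roughly like this. lightCount and the arrays are the uniforms Processing fills in for light shaders (8 is Processing's light limit); the loop and light model are just a sketch and ignore the falloff and spot parameters:
attribute vec4 position;
attribute vec3 normal;
varying vec4 color;
uniform mat4 modelview;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform int lightCount;
uniform vec4 lightPosition[8];
uniform vec3 lightAmbient[8];
uniform vec3 lightDiffuse[8];
uniform vec3 lightSpecular[8];
uniform float SpecularPower;
void main()
{
    vec3 v = vec3(modelview * position);
    vec3 N = normalize(normalMatrix * normal);
    vec3 E = normalize(-v);
    vec3 lightSum = vec3(0.0);
    for (int i = 0; i < 8; i++) {
        if (i >= lightCount) break; // only the lights actually set in the sketch
        vec3 L = normalize(lightPosition[i].xyz - v);
        vec3 R = normalize(reflect(-L, N));
        lightSum += lightAmbient[i]
                  + clamp(lightDiffuse[i] * max(dot(N, L), 0.0), 0.0, 1.0)
                  + clamp(lightSpecular[i] * pow(max(dot(R, E), 0.0), 0.3 * SpecularPower), 0.0, 1.0);
    }
    color = vec4(lightSum, 1.0);
    gl_Position = projectionMatrix * modelview * position;
}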
I'm taking my first steps in WebGL programming. I created a simple setup following this tutorial and managed to add a few things of my own, but I stumbled on adding light, particularly specular light.
I assume most of it would be implemented in the fragment shader, with maybe some additions to the vertex shader and the Light module. So that's the code I provide below.
Vertex shader:
attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vUv = uv;
vNormal = (model * vec4(normal, 0.)).xyz;
gl_Position = projection * view * model * vec4(position, 1.);
}
Fragment shader:
#ifdef GL_ES
precision highp float;
#endif
uniform vec3 lightDirection;
uniform float ambientLight;
uniform sampler2D diffuse;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
float lightness = -clamp(dot(normalize(vNormal), normalize(lightDirection)), -1., 0.);
lightness = ambientLight + (1. - ambientLight) * lightness;
gl_FragColor = vec4(texture2D(diffuse, vUv).rgb * lightness, 1.);
}
Light.js module:
function Light () {
this.lightDirection = new Vector3(-1, -1, -1)
this.ambientLight = 0.3
}
Light.prototype.use = function (shaderProgram) {
var dir = this.lightDirection
var gl = shaderProgram.gl
gl.uniform3f(shaderProgram.lightDirection, dir.x, dir.y, dir.z)
gl.uniform1f(shaderProgram.ambientLight, this.ambientLight)
}
I would really appreciate your suggestions here. Thanks in advance!
The most common and simplest lighting models are the Phong reflection model and the Blinn–Phong model.
The following shader code is based on your original code and implements the Blinn–Phong model. In comparison to your code, the light calculations are done in view space, because the specular highlight depends on the view position, which is (0, 0, 0) in view space. So the light direction has to be transformed to view space.
Vertex shader:
attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform vec3 lightDirection;
varying vec3 vPos;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 lightDirectionView;
void main()
{
lightDirectionView = (view * vec4(lightDirection, 0.)).xyz;
mat4 modelView = view * model;
vec4 viewPos = modelView * vec4(position, 1.0);
vPos = viewPos.xyz;
vUv = uv;
vNormal = (modelView * vec4(normal, 0.)).xyz;
gl_Position = projection * viewPos;
}
Fragment shader:
#ifdef GL_ES
precision highp float;
#endif
uniform float shininess;
uniform float ambientLight;
uniform sampler2D diffuse;
varying vec3 vPos;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 lightDirectionView;
void main()
{
vec3 color = texture2D(diffuse, vUv).rgb;
vec3 N = normalize( vNormal );
vec3 L = normalize( -lightDirectionView );
vec3 V = normalize( -vPos );
vec3 H = normalize( V + L );
float NdotL = dot(N, L);
float NdotH = dot(N, H);
float kDiffuse = max(0.0, NdotL);
// float kSpecular = (shininess + 2.0) * pow(max(0.0, NdotH), shininess) / (2.0 * 3.14159265);
float kSpecular = pow(max(0.0, NdotH), shininess);
vec3 light_col = color * (kDiffuse + kSpecular);
gl_FragColor = vec4(light_col, 1.0);
}
The value of the uniform shininess has to be a positive value in range [1, 100].
See also Phong and Gouraud Shading WebGL.
The title says it all: with OpenGL's built-in lighting system, the specular light does not increase or decrease with distance from the object, but with my shader implementation it does.
Vertex Shader:
#version 330
layout (location = 0) in vec3 position;
layout (location = 1) in vec2 texCoord;
layout (location = 2) in vec3 normal;
out vec2 texCoord0;
out vec3 normal0;
out vec3 worldPos0;
uniform mat4 transform;
uniform mat4 normalRotation;
uniform mat4 transformProjected;
void main()
{
gl_Position = transformProjected * vec4(position, 1.0);
texCoord0 = texCoord;
normal0 = normalize((normalRotation * vec4(normal, 0.0))).xyz;
worldPos0 = (transform * vec4(position, 1.0)).xyz;
}
Fragment Shader:
#version 330
in vec2 texCoord0;
in vec3 normal0;
in vec3 worldPos0;
out vec4 fragColor;
struct BaseLight
{
vec3 colorDiffuse;
vec3 colorSpecular;
float intensityDiffuse;
};
struct DirectionalLight
{
BaseLight base;
vec3 direction;
};
uniform vec3 tint;
uniform sampler2D sampler;
uniform vec3 eyePos; // camera pos
uniform vec3 ambientLight;
uniform vec3 emissiveLight;
//material
uniform float specularIntensity;
uniform float specularPower;
uniform DirectionalLight directionalLight;
vec4 calcLight(BaseLight base, vec3 direction, vec3 normal)
{
    float diffuseFactor = dot(normal, -direction);
    vec4 diffuseColorFinal = vec4(0,0,0,0);
    vec4 specularColorFinal = vec4(0,0,0,0);
    if(diffuseFactor > 0)
    {
        diffuseColorFinal = vec4(base.colorDiffuse,1) * diffuseFactor * base.intensityDiffuse;
        vec3 directionToEye = normalize(eyePos - worldPos0);
        vec3 reflectDirection = normalize(reflect(direction, normal));
        float specularFactor = dot(directionToEye, reflectDirection);
        specularFactor = pow(specularFactor, specularPower);
        if(specularFactor > 0)
            specularColorFinal = vec4(base.colorSpecular,1) * specularFactor * specularIntensity;
    }
    return diffuseColorFinal + specularColorFinal;
}
void main()
{
vec4 colorD = texture(sampler, texCoord0.xy) * vec4(tint,1);
vec3 normal = normal0;
vec4 totalLight = vec4(ambientLight,1) + vec4(emissiveLight,1);
totalLight += calcLight(directionalLight.base,-directionalLight.direction,normal);
fragColor = colorD * totalLight;
}
As you can see from the two images, the specular light takes up a larger surface area the farther the camera gets from the plane. In my test with OpenGL's built-in lighting, this doesn't happen. Is there a way to fix this? I'm new to lighting; maybe this is normal for directional light sources? Thanks for the help!
I'm also setting my eyePos uniform to my camera position. I don't know if that helps.
Basically you need the distance between the fragment and the light, dist. This can be a problem for a directional light, though, because you only have a direction and the distance is assumed to be infinite. Maybe switch to a point light?
When you have dist, you use the formula:
att = 1.0 / (Kc + Kl*dist + Kq*dist^2)
Kc - constant attenuation
Kl - linear attenuation
Kq - quadratic attenuation
A simpler version (only Kq used, the rest set to 1.0):
float attenuation = 1.0 / (1.0 + light.attenuation * pow(distanceToLight, 2));
Then, in the lighting equation, you basically multiply the calculated color by this att factor:
vec4 finalColor = ambient + (diffuseColorFinal + specularColorFinal)*att
http://www.ozone3d.net/tutorials/glsl_lighting_phong_p4.php#part_4
http://tomdalling.com/blog/modern-opengl/07-more-lighting-ambient-specular-attenuation-gamma/
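For illustration, here is a hedged sketch of how that attenuation could be wired into the question's fragment shader if the directional light is replaced by a point light. lightPosition and attenuation are hypothetical uniforms, not part of the original code; calcLight, BaseLight and worldPos0 are the ones already defined above:
uniform vec3 lightPosition;   // hypothetical: world-space position of the point light
uniform float attenuation;    // hypothetical: the quadratic factor Kq from the formula above
vec4 calcPointLight(BaseLight base, vec3 normal)
{
    vec3 toFragment = worldPos0 - lightPosition;
    float distanceToLight = length(toFragment);
    vec3 direction = toFragment / distanceToLight; // direction the light travels
    float att = 1.0 / (1.0 + attenuation * distanceToLight * distanceToLight);
    return calcLight(base, direction, normal) * att;
}
In main(), the existing calcLight call would then be replaced by something like totalLight += calcPointLight(directionalLight.base, normal); (reusing the existing BaseLight uniform for brevity).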