Processing outputs nothing using Gouraud shading - GLSL

Sorry, I am new to OpenGL ES and Processing.
The Processing sketch and shaders below output only the background.
PShader Gouraud,Phong;
rocket = loadShape("rocket.obj");
rocket.setFill(color(800, 0, 0));
Gouraud= loadShader("gouraudfragment.glsl","gouraudvertex.glsl");
Phong= loadShader("phongfragment.glsl","phongvertex.glsl");
background(0);
pushMatrix();
shader(Gouraud);
translate(130,height/2.0);
rotateY(rc);
rotateX(0.4);
noStroke();
fill(#800080);
box(100);
rc+=(0.02+speedCube);
rc*=dirCube;
popMatrix();
pushMatrix();
shader(Gouraud);
translate(width/2, height/2 + 100, -200);
rotateZ(PI);
rotateY(rr);
shape(rocket,100,100);
rr +=( 0.02+speedRocket);
rr*=dirRocket;
popMatrix();
Vertex shader:
varying vec3 N;
varying vec3 v;
varying vec4 diffuse;
varying vec4 spec;
attribute vec4 position;
attribute vec3 normal;
uniform mat4 modelview;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform vec4 lightPosition;
uniform vec3 lightAmbient;
uniform vec3 lightDiffuse;
uniform vec3 lightSpecular;
uniform float SpecularPower;
void main()
{
vec4 diffuse;
vec4 spec;
vec4 ambient;
v = vec3(modelview * position);
N = normalize(normalMatrix * normal);
gl_Position = projectionMatrix * position;
vec3 L = normalize(lightPosition.xyz - v);
vec3 E = normalize(-v);
vec3 R = normalize(reflect(-L,N));
ambient = vec4(lightAmbient,100.0);
diffuse = vec4(clamp( lightDiffuse * max(dot(N,L), 0.0) , 0.0, 1.0 ) ,100.0);
spec = vec4(clamp (lightSpecular * pow(max(dot(R,E),0.0),0.3*SpecularPower) , 0.0, 1.0 ),100.0);
color = ambient + diffuse + spec;
}
Fragment shader:
void main()
{
gl_FragColor = color;
}
Please help!
Before applying Gouraud shading:
After applying Gouraud shading:
The Processing sketch loads the obj, then draws a cube and applies the Gouraud shader, but after that only the background is shown; the loaded obj and the cube are gone. Nothing is shown!

The shader doesn't even compile and link. The vertex shader has one varying output (color), so the fragment shader needs the corresponding input varying vec4 color;:
varying vec4 color;
When you set the clip space position, then the vertex coordinate has to be transformed by the model view and projection matrix:
gl_Position = projectionMatrix * modelview * position;
The type specifications of v and N are missing, and ambient, diffuse and spec are of type vec4 rather than vec3.
Vertex shader:
attribute vec4 position;
attribute vec3 normal;
varying vec4 color;
uniform mat4 modelview;
uniform mat4 projectionMatrix;
uniform mat3 normalMatrix;
uniform vec4 lightPosition;
uniform vec3 lightAmbient;
uniform vec3 lightDiffuse;
uniform vec3 lightSpecular;
uniform float SpecularPower;
void main()
{
vec3 v = vec3(modelview * position);
vec3 N = normalize(normalMatrix * normal);
gl_Position = projectionMatrix * modelview * position;
vec3 L = normalize(lightPosition.xyz - v);
vec3 E = normalize(-v);
vec3 R = normalize(reflect(-L,N));
vec4 ambient = vec4(lightAmbient,100.0);
vec4 diffuse = vec4(clamp( lightDiffuse * max(dot(N,L), 0.0) , 0.0, 1.0 ) ,100.0);
vec4 spec = vec4(clamp (lightSpecular * pow(max(dot(R,E),0.0),0.3*SpecularPower) , 0.0, 1.0 ),100.0);
color = ambient + diffuse + spec;
}
Fragment shader:
varying vec4 color;
void main()
{
gl_FragColor = color;
}
Of course, you have to set at least an ambient light source with ambientLight().
You can use a directionalLight(), pointLight() or spotLight(), too.
But note that your shader can handle only one light source. More than one light source would raise
OpenGL error 1282 at top endDraw(): invalid operation
If you want to use more than one light source, then you would have to use uniform arrays in the vertex shader for lightPosition, lightAmbient, lightDiffuse, and lightSpecular, as sketched below. See Types of shaders in Processing (https://processing.org/tutorials/pshader/).
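For illustration, a minimal sketch of how the per-light uniforms and the lighting loop could look in the vertex shader. The array size of 8 and the lightCount uniform follow the naming used by Processing's built-in light shaders as described in the PShader tutorial, so treat them as assumptions and check them against your Processing version:
// Sketch only: assumes Processing fills lightCount and the light arrays.
uniform int lightCount;
uniform vec4 lightPosition[8];
uniform vec3 lightAmbient[8];
uniform vec3 lightDiffuse[8];
uniform vec3 lightSpecular[8];
// ... inside main(), after v, N and E have been computed as before:
vec3 lighting = vec3(0.0);
for (int i = 0; i < 8; ++i) {
    if (i >= lightCount) break;           // only the active lights contribute
    vec3 L = normalize(lightPosition[i].xyz - v);
    vec3 R = normalize(reflect(-L, N));
    lighting += lightAmbient[i]
              + lightDiffuse[i]  * max(dot(N, L), 0.0)
              + lightSpecular[i] * pow(max(dot(R, E), 0.0), 0.3 * SpecularPower);
}
color = vec4(clamp(lighting, 0.0, 1.0), 1.0);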

Related

Mixing colours in OpenGL GLSL

I am trying to create a translucent, stained-glass effect in OpenGL, using C++.
I use a normal map generated from a Perlin noise texture. Refraction is calculated with respect to the manipulated normal vectors, and it works.
For the stained glass, I am trying to mix the colour from the skybox obtained through refraction with another colour. I tried this with a constant value, and that works too.
When I try to get the second colour from a texture, I do not see the object on screen. I can load the texture if I ignore refraction and mixing, though.
Here is my vertex shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormal;
layout (location = 2) in vec2 aTexCoord;
layout (location = 3) in vec3 aTangent;
layout (location = 4) in vec3 aBitangent;
out vec3 FragPos;
out vec2 texCoord;
out vec3 tangentLightPos;
out vec3 tangentViewPos;
out vec3 tangentFragPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
//normal
uniform vec3 lightPos;
uniform vec3 viewPos;
void main()
{
texCoord = aTexCoord;
FragPos = vec3(view * model * vec4(aPos, 1.0));
mat3 normalMatrix = transpose(inverse(mat3(model)));
vec3 T = normalize(normalMatrix * aTangent);
vec3 N = normalize(normalMatrix * aNormal);
T = normalize(T - dot(T, N) * N);
vec3 B = cross(N, T);
mat3 TBN = transpose(mat3(T, B, N));
tangentLightPos = TBN * lightPos;
tangentViewPos = TBN * viewPos;
tangentFragPos = TBN * FragPos;
gl_Position = projection * view * model * vec4(aPos, 1.0);
}
and my fragment shader:
#version 330 core
out vec4 FragColor;
in vec3 FragPos;
in vec2 texCoord;
in vec3 tangentLightPos;
in vec3 tangentViewPos;
in vec3 tangentFragPos;
uniform sampler2D texture1;
uniform sampler2D texture2;
uniform samplerCube skybox;
uniform vec3 cameraPos;
void main()
{
//normal map - normal
vec3 normalDir = texture(texture2, texCoord).rgb;
normalDir = normalize(normalDir * 2.0 - 1.0);
vec3 Incident = normalize(tangentFragPos - cameraPos);
vec3 RefractedDir = refract(Incident, normalDir, 1.0/1.2);
float DispersionIndex = 1.0 - dot(Incident, normalDir);
vec3 RefractedFragColor = vec3(texture(skybox, RefractedDir).rgb);
//FIXME:
vec3 texColor = texture(texture1, texCoord).rgb;
vec3 constTexColor = vec3(1.0, 1.0, 1.0);
FragColor = mix( vec4(RefractedFragColor, 1.0), vec4(texColor.rgb, 0.5), 0.2); // doesnt show object on screen
// FragColor = mix( vec4(RefractedFragColor, 1.0), vec4(constTexColor.rgb, 0.5), 0.2); --> works , shows a tint on glass
// FragColor = vec4(texColor, 1.0); ---> shows only texture
}
With a constant value for the colour in mix:
With only the texture:
Why does the mix not work with color from texture?

How can I draw gluQuadric with color?

struct quadricObj {
GLUquadricObj* obj;
GLenum drawmode{ GLU_FILL };
GLdouble radius{1.0};
GLint slices{20};
GLint stacks{20};
glm::vec3 col{ 1.0,0.0,0.0 };
std::vector<glm::mat4> M;
glm::mat4 world_M() {
glm::mat4 WM(1.0f);
std::for_each(this->M.begin(), this->M.end(), [&WM](glm::mat4& m) { WM *= m; });
//M0*M1*M2 TRS
return WM;
}
GLvoid draw() {
gluQuadricDrawStyle(this->obj, this->drawmode);
glUniformMatrix4fv(worldLoc, 1, GL_FALSE, glm::value_ptr(this->world_M()));
glColor4f(this->col.r, this->col.g, this->col.b, 1.0f); // doesn't work.
gluSphere(this->obj, this->radius, this->slices, this->stacks);
}
};
This is the struct I use for a quadric object. I think glColor4f should work, but it doesn't.
The quadrics stay black.
How can I color quadrics in GL?
#version 330
in vec3 v_normal;
in vec2 v_texCoord;
in vec3 v_color;
in vec3 fragPos;
out vec4 gl_FragColor;
uniform vec3 lightColor;
uniform vec3 lightPos;
uniform vec3 viewPos;
uniform float ambientLight;
uniform int shine;
void main(void)
{
vec3 ambient = clamp(ambientLight*lightColor,0.0,1.0);
vec3 normalVector = normalize(v_normal);
vec3 lightDir = normalize(lightPos-fragPos);
float diffuseLight = max(dot(normalVector,lightDir),0.0);
vec3 diffuse = clamp(diffuseLight * lightColor,0.0,1.0);
vec3 viewDir = normalize(viewPos-fragPos);
vec3 reflectDir = reflect(-lightDir,normalVector);
float specularLight = max(dot(viewDir,reflectDir),0.0);
specularLight = pow(specularLight,shine);
vec3 specular = clamp(specularLight*lightColor,0.0,1.0);
vec3 result = (ambient+diffuse)*v_color+specular*(0.8,0.8,0.8);
gl_FragColor = vec4(result,1.0);
}
I edited my fragment shader to contain the Phong model. Can this work with gluSphere too, or not? I'm also using a vertex shader, which has in variables for position, colour, normal and texture coordinates, and the corresponding outs.
gluSphere cannot be used together with user-defined vertex shader input variables (attributes). You are limited to a GLSL 1.20 vertex shader and the Vertex Shader Built-In Attributes. You can combine a GLSL 1.20 vertex shader with your fragment shader.
A suitable vertex shader can look like this:
#version 120
varying vec3 v_normal;
varying vec2 v_texCoord;
varying vec3 v_color;
varying vec3 fragPos;
uniform mat4 worldMatrix; // the matrix with the location worldLoc
void main()
{
v_color = gl_Color.rgb;
v_texCoord = gl_MultiTexCoord0.st;
v_normal = mat3(worldMatrix) * gl_Normal;
fragPos = (worldMatrix * gl_Vertex).xyz;
gl_Position = gl_ProjectionMatrix * worldMatrix * gl_Vertex;
}

WebGL: adding specular light without the help of THREE.JS

I'm making my first steps in WebGL programming. I created a simple setup following this tutorial and managed to add a few things of my own, but I stumbled on adding light, particularly specular light.
As I assume, most of it would be implemented in my fragment shader, with maybe some additions in the vertex shader and the Light module. So that's the code I provide below.
Vertex shader:
attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
vUv = uv;
vNormal = (model * vec4(normal, 0.)).xyz;
gl_Position = projection * view * model * vec4(position, 1.);
}
Fragment shader:
#ifdef GL_ES
precision highp float;
#endif
uniform vec3 lightDirection;
uniform float ambientLight;
uniform sampler2D diffuse;
varying vec3 vNormal;
varying vec2 vUv;
void main() {
float lightness = -clamp(dot(normalize(vNormal), normalize(lightDirection)), -1., 0.);
lightness = ambientLight + (1. - ambientLight) * lightness;
gl_FragColor = vec4(texture2D(diffuse, vUv).rgb * lightness, 1.);
}
Light.js module:
function Light () {
this.lightDirection = new Vector3(-1, -1, -1)
this.ambientLight = 0.3
}
Light.prototype.use = function (shaderProgram) {
var dir = this.lightDirection
var gl = shaderProgram.gl
gl.uniform3f(shaderProgram.lightDirection, dir.x, dir.y, dir.z)
gl.uniform1f(shaderProgram.ambientLight, this.ambientLight)
}
I would really appreciate your suggestions here. Thanks in advance!
The most common and simplest light models are the Phong reflection model and the Blinn–Phong model.
The following shader code is based on your original code and implements the Blinn–Phong model. In contrast to your code, the light calculations are done in view space, because the specular highlight depends on the view position, which is (0, 0, 0) in view space. So the light direction has to be transformed to view space.
Vertex shader:
attribute vec3 position;
attribute vec3 normal;
attribute vec2 uv;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
uniform vec3 lightDirection;
varying vec3 vPos;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 lightDirectionView;
void main()
{
lightDirectionView = (view * vec4(lightDirection, 0.)).xyz;
mat4 modelView = view * model;
vec4 viewPos = modelView * vec4(position, 1.0);
vPos = viewPos.xyz;
vUv = uv;
vNormal = (modelView * vec4(normal, 0.)).xyz;
gl_Position = projection * viewPos;
}
Fragment shader:
#ifdef GL_ES
precision highp float;
#endif
uniform float shininess;
uniform float ambientLight;
uniform sampler2D diffuse;
varying vec3 vPos;
varying vec3 vNormal;
varying vec2 vUv;
varying vec3 lightDirectionView;
void main()
{
vec3 color = texture2D(diffuse, vUv).rgb;
vec3 N = normalize( vNormal );
vec3 L = normalize( -lightDirectionView );
vec3 V = normalize( -vPos );
vec3 H = normalize( V + L );
float NdotL = dot(N, L);
float NdotH = dot(N, H);
float kDiffuse = max(0.0, NdotL);
// float kSpecular = (shininess + 2.0) * pow(max(0.0, NdotH), shininess) / (2.0 * 3.14159265);
float kSpecular = pow(max(0.0, NdotH), shininess);
vec3 light_col = color * (kDiffuse + kSpecular);
gl_FragColor = vec4(light_col, 1.0);
}
The value of the uniform shininess has to be a positive value in range [1, 100].
See also Phong and Gouraud Shading WebGL.

Two shaders on one object

I have a problem with multiple shaders on one object.
This is my render code:
#include "MeshRenderer.h"
ForwardAmbient* shader1;
ForwardDirectional* shader2;
MeshRenderer::MeshRenderer(Obj& obj) :
meshObject(obj)
{
shader1 = new ForwardAmbient(vec3(1, 1, 1));
shader2 = new ForwardDirectional(vec3(1, 0, 0), vec3(1, 1, 1));
}
MeshRenderer::~MeshRenderer()
{
}
void MeshRenderer::Render(RenderingCore* rc)
{
//for (Shader* shader : meshObject.shaders)
//{
//}
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
/*
meshObject.shader->Bind();
meshObject.shader->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
*/
}
ambient light.vs:
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
}
ambient light.fs:
#version 120
varying vec2 texCoord0;
uniform vec3 ambientLight;
uniform float alpha;
uniform sampler2D sampler;
void main()
{
gl_FragColor = texture2D(sampler, texCoord0.xy) * vec4(ambientLight, alpha);
}
directional light.vs
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
normal0 = (transform * vec4(normal, 0)).xyz;
worldPos0 = (transform * vec4(position, 1)).xyz;
}
directional light.fs
#version 120
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform vec3 color;
uniform float alpha;
uniform vec3 direction;
uniform float specularIntensity;
uniform float specularPower;
uniform vec3 eyePosition;
uniform sampler2D sampler;
vec4 calcLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
float diffuseFactor = dot(normal, -direction);
vec4 diffuseColor = vec4(0,0,0,0);
vec4 specularColor = vec4(0,0,0,0);
if(diffuseFactor > 0)
{
diffuseColor = vec4(color, 1.0) * diffuseFactor;
vec3 directionToEye = normalize(eyePosition - worldPos0);
vec3 reflectDirection = normalize(reflect(direction, normal));
float specularFactor = dot(directionToEye, reflectDirection);
specularFactor = pow(specularFactor, specularPower);
if(specularFactor > 0)
{
specularColor = vec4(color, 1.0) * specularIntensity * specularFactor;
}
}
return diffuseColor + specularColor;
}
vec4 calcDirectionalLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
return calcLight(color, alpha, -direction, normal);
}
void main()
{
gl_FragColor = texture2D(sampler, texCoord0.xy) * calcDirectionalLight(color, 1, direction, normalize(normal0));
}
Here is the result:
http://imgur.com/Bawny2P
Only the ambient light is rendered; the directional light is not.
Your problem is this:
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
OpenGL doesn't know what an "object" is. It just draws points, lines and triangles, one at a time. To sort out depth overlap, the depth buffer method is used. When you issue exactly the same draw call (meshObject.mesh->Render) with the same vertex setup and depth testing enabled, then one of the two draw calls will win over the other.
Also, more importantly, draw calls don't "stack". You simply cannot combine shaders by drawing the same thing multiple times; it may sort of work for additive processes, but that's barking up the wrong tree: instead of saving work, you're duplicating the amount of work to be done.
What you should do instead is merge the two shaders into a single one and draw the geometry only once, with the merged shader.
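For example, a merged fragment shader could look like the following sketch. It reuses your two fragment shaders: the ambient term and the directional term are summed before being multiplied with the texel, and the vertex shader would be your directional one (an illustration under those assumptions, not tested):
#version 120
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform vec3 ambientLight;
uniform float alpha;
uniform vec3 color;
uniform vec3 direction;
uniform float specularIntensity;
uniform float specularPower;
uniform vec3 eyePosition;
uniform sampler2D sampler;
// calcLight as in your directional light.fs
vec4 calcLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
    float diffuseFactor = dot(normal, -direction);
    vec4 diffuseColor = vec4(0.0);
    vec4 specularColor = vec4(0.0);
    if (diffuseFactor > 0.0)
    {
        diffuseColor = vec4(color, 1.0) * diffuseFactor;
        vec3 directionToEye = normalize(eyePosition - worldPos0);
        vec3 reflectDirection = normalize(reflect(direction, normal));
        float specularFactor = pow(dot(directionToEye, reflectDirection), specularPower);
        if (specularFactor > 0.0)
            specularColor = vec4(color, 1.0) * specularIntensity * specularFactor;
    }
    return diffuseColor + specularColor;
}
void main()
{
    // ambient and directional contributions are added, then modulated by the texture
    vec4 light = vec4(ambientLight, alpha)
               + calcLight(color, 1.0, -direction, normalize(normal0));
    gl_FragColor = texture2D(sampler, texCoord0.xy) * light;
}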

Modern GLSL (OpenGL 3+): Implementing the Phong effect correctly

I am implementing a basic Phong lighting GLSL shader. I have looked up some things on the internet and found that the Phong effect is created by adding an ambient, a diffuse, and a specular layer on the object (see image below, from Tom Dalling's site). The problem is that I have seen a lot of examples, and none of them really suits my GLSL set-up. Can any of you give me a code example of the correct way to implement the Phong effect that would fit my GLSL set-up?
PS: This question could be put on hold because it may seem opinion-based. In my mind, it is not, because I would like to know the most effective and correct way of implementing it.
Here is my vertex shader:
#version 120
uniform mat4 modelView;
uniform mat4 MVP;
uniform float time;
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec3 position0;
varying vec2 texCoord0;
varying vec3 normal0;
varying mat4 modelView0;
void main()
{
//Updating varyings...
position0 = position;
texCoord0 = texCoord;
normal0 = (MVP * vec4(normal, 0.0)).xyz;
modelView0 = modelView;
//set position
gl_Position = MVP * vec4(position, 1.0);
}
and my fragment shader:
#version 120
varying vec3 position0;
varying vec2 texCoord0;
varying vec3 normal0;
varying mat4 modelView0;
uniform sampler2D diffuse;
void main()
{
vec4 surfaceColor = texture2D(diffuse, texCoord0);
gl_FragColor = (texture2D(diffuse, texCoord0))
* clamp(dot(-vec3(0.0, 0.5, 0.5), normal0), 0, 1.0);
}
Try this:
void main()
{
vec4 texread = texture2D(diffuse, texCoord0);
vec3 normal = normalize(normal0);
vec3 material_kd = vec3(1.0,1.0,1.0);
vec3 material_ks = vec3(1.0,1.0,1.0);
vec3 material_ka = vec3(0.2,0.2,0.2);
vec3 material_ke = vec3(0.0,0.0,0.0);
float material_shininess = 60;
vec3 lightpos = vec3(0.0,10.0,5.0);
vec3 lightcolor = vec3(1.0,1.0,1.0);
vec3 lightdir = normalize(lightpos - worldPosition);
float shade = clamp(dot(lightdir, normal), 0.0, 1.0);
vec3 toWorldpos = normalize((worldPosition) - u_eyePos);
vec3 reflectDir = reflect( toWorldpos, normal );
vec4 specular = vec4(pow(clamp(dot(lightdir, reflectDir),0.0,1.0), material_shininess) * lightcolor * material_ks, 1.0);
vec4 shaded = texread * vec4(material_kd, 1.0) * vec4(lightcolor , 1.0) * shade;
vec4 ambient = texread * vec4(material_ka, 1.0);
vec4 emission = vec4(material_ke, 1.0);
gl_FragColor = shaded + specular + emission + ambient;
}
It may have some compilation errors though, as I didn't run it.
You may need to upload your eye position as a uniform (u_eyePos) and calculate the world position (worldPosition) for it to work; a sketch of that is shown below.
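A minimal sketch of the vertex-shader side, assuming you add the model (world) matrix as its own uniform so the world position can be computed. Your current set-up only uploads modelView and MVP, so the model uniform and the worldPosition varying here are assumptions that would have to be mirrored in the fragment shader and in your application code (which must also upload u_eyePos):
#version 120
uniform mat4 model;   // assumed extra uniform: the model (world) matrix on its own
uniform mat4 MVP;
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec3 worldPosition;
varying vec2 texCoord0;
varying vec3 normal0;
void main()
{
    // world-space position used by the lighting in the fragment shader
    worldPosition = (model * vec4(position, 1.0)).xyz;
    texCoord0 = texCoord;
    // world-space normal (fine as long as the model matrix has no non-uniform scale)
    normal0 = (model * vec4(normal, 0.0)).xyz;
    gl_Position = MVP * vec4(position, 1.0);
}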
I made my own Phong shader; here is the code:
Fragment shader:
#version 150
uniform mat4 modelView;
uniform mat3 normalMatrix;
uniform vec3 cameraPosition;
uniform sampler2D materialTex;
uniform float materialShininess;
uniform vec3 materialSpecularColor;
uniform vec3 lightPosition;//light settings
uniform vec3 lightIntensities;
uniform float lightAttenuation;
uniform float lightAmbientCoeff;
in vec3 position0;
in vec2 texCoord0;
in vec3 normal0;
out vec4 fragmentColor;
void main()
{
//calculate normal in world coordinates
vec3 normal = normalize(normalMatrix * normal0);
//calculate the location of this fragment (pixel) in world coordinates
vec3 surfacePos = vec3(modelView * vec4(position0, 1));
//color of the current fragment
vec4 surfaceColor = texture(materialTex, texCoord0);
//calculate the vector from this pixels surface to the light source
vec3 surfaceToLight = normalize(lightPosition - surfacePos);
//cam distance
vec3 surfaceToCamera = normalize(cameraPosition - surfacePos);
///////////////////////////DIFUSE///////////////////////////////////////
//calculate the cosine of the angle of incidence
//float diffuseCoeff = dot(normal, surfaceToLight) / (length(surfaceToLight) * length(normal));
float diffuseCoeff = max(0.0, dot(normal, surfaceToLight));
vec3 diffuse = diffuseCoeff * surfaceColor.rgb * lightIntensities;
/////////////////////////AMBIENT////////////////////////////////////////
vec3 ambient = lightAmbientCoeff * surfaceColor.rgb * lightIntensities;
/////////////////////////SPECULAR//////////////////////////////////////
float specularCoeff = 0.0;
if(diffuseCoeff > 0.0)
specularCoeff = pow(max(0.0, dot(surfaceToCamera, reflect(-surfaceToLight, normal))), materialShininess);
vec3 specular = specularCoeff * materialSpecularColor * lightIntensities;
////////////////////////ATTENUATION///////////////////////////////////
float distanceToLight = length(lightPosition - surfacePos);
float attenuation = 1.0 / (1.0 + lightAttenuation * pow(distanceToLight, 2));
/////////////////////////////////FINAL/////////////////////////////////
vec3 linearColor = ambient + attenuation * (diffuse + specular);
//finalColor with gamma correction
vec3 gamma = vec3(1.0/2.2);
fragmentColor = vec4(pow(linearColor, gamma), surfaceColor.a);
//fragmentColor = vec4(diffuseCoeff * lightIntensities * surfaceColor.rgb, surfaceColor.a);
}