I wrote my shaders with #version 150 and I have a problem with uniforms: if I use any of the light uniforms (vec4), my scene disappears.
Here is an extract of the code that sets the uniforms:
typedef struct { float x, y, z, w; } vec4;
//...
class MyClass {
GLuint _id;
vec4 light_diffuse;
};
//...
void MyClass::setUniforms() {
//...
GLint location = glGetUniformLocation(_id, "in_light_diffuse");
//...
glUseProgram(_id);
//...
glUniform4fv(location, 1, (const GLfloat *)&light_diffuse);
//...
}
THIS WORKS:
#version 150
in vec4 in_vertex;
in vec3 in_normal;
in vec4 in_color;
uniform mat4 in_mvp_matrix;
uniform vec4 in_light_position;
uniform vec4 in_light_ambient;
uniform vec4 in_light_diffuse;
uniform vec4 in_light_specular;
out vec4 v_color;
void main() {
//vec4 a = in_light_position + in_light_ambient + in_light_diffuse + in_light_specular;
v_color = in_color;
gl_Position = in_mvp_matrix * in_vertex;
}
THIS DOESN'T WORK:
#version 150
in vec4 in_vertex;
in vec3 in_normal;
in vec4 in_color;
uniform mat4 in_mvp_matrix;
uniform vec4 in_light_position;
uniform vec4 in_light_ambient;
uniform vec4 in_light_diffuse;
uniform vec4 in_light_specular;
out vec4 v_color;
void main() {
vec4 a = in_light_position + in_light_ambient + in_light_diffuse + in_light_specular;
v_color = in_color;
gl_Position = in_mvp_matrix * in_vertex;
}
In the first case I can see my colorful objects, but in the second everything disappears. I don't even use these uniforms to calculate the output color, and the result is still an empty screen.
It's that simple: if I use any one of them (no matter for what), the scene is empty.
Can anybody help?
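Not a full answer, but one thing worth checking first: if a uniform is never used in the shader, the compiler drops it, glGetUniformLocation returns -1, and glUniform* calls with location -1 are silently ignored, so the "working" and "broken" shaders don't necessarily hit the same code path on the CPU side. A minimal sketch of that check, reusing the names from the question and assuming <iostream> is included:
// Sketch: verify the uniform is actually active before relying on its location.
GLint location = glGetUniformLocation(_id, "in_light_diffuse");
if (location == -1) {
    // Unused uniforms are optimized out; uploads to location -1 are silently ignored,
    // which can make the "unused" and "used" versions of a shader behave very differently.
    std::cerr << "in_light_diffuse is not an active uniform" << std::endl;
}
glUseProgram(_id);
glUniform4fv(location, 1, (const GLfloat *)&light_diffuse);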
Solved. I was uploading a vec4 to the location of the mat4 uniform. (A glUniform call whose type doesn't match the declared uniform raises GL_INVALID_OPERATION and leaves the uniform unchanged, so the matrix stayed at its default value and nothing was drawn; while the light uniforms were unused they were optimized away, which is probably why the mistake only showed up once they became active.)
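For anyone hitting the same thing later: the mismatch is easy to catch at run time. A rough sketch (again reusing _id, location and light_diffuse from above, with <iostream> included) that dumps the active uniforms with their types and checks the error flag after the upload:
GLint count = 0;
glGetProgramiv(_id, GL_ACTIVE_UNIFORMS, &count);
for (GLint i = 0; i < count; ++i) {
    char name[128];
    GLint size = 0;
    GLenum type = 0; // e.g. GL_FLOAT_VEC4 or GL_FLOAT_MAT4
    glGetActiveUniform(_id, (GLuint)i, sizeof(name), NULL, &size, &type, name);
    std::cout << i << ": " << name << " (type 0x" << std::hex << type << std::dec << ")\n";
}
glUniform4fv(location, 1, (const GLfloat *)&light_diffuse);
if (glGetError() == GL_INVALID_OPERATION) {
    // A vec4 uploaded to a mat4 location (or no program bound) ends up here.
    std::cerr << "glUniform4fv failed: type/size mismatch?" << std::endl;
}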
Related
I have written a fragment shader that works just fine with a single light. Now I am trying to adapt it to work with 8 lights and implement it in Processing. Clearly I am doing something wrong in the math and I cannot see what it is. I have read other posts about this and tried to adapt their answers to my problem, but no luck so far.
////Fragment/////
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif
varying vec4 vertColor;
varying vec3 ecNormal;
varying vec3 lightDir;
void main() {
vec3 direction = normalize(lightDir);
vec3 normal = normalize(ecNormal);
float intensity = max(0.0, dot(direction, normal));
gl_FragColor = vec4(intensity, intensity, intensity, 1) * vertColor;
}
////vertex/////
#define PROCESSING_LIGHT_SHADER
uniform mat4 modelview;
uniform mat4 transform;
uniform mat3 normalMatrix;
uniform vec4 lightPosition;
uniform vec3 lightNormal;
attribute vec4 vertex;
attribute vec4 color;
attribute vec3 normal;
varying vec4 vertColor;
varying vec3 ecNormal;
varying vec3 lightDir;
void main() {
gl_Position = transform * vertex;
vec3 ecVertex = vec3(modelview * vertex);
ecNormal = normalize(normalMatrix * normal);
lightDir = normalize(lightPosition.xyz - ecVertex);
vertColor = color;
}
Just making it compile real quick with an online shader tool (http://shdr.bkcore.com/).
You might need to pass the attributes from the vertex shader to varyings for the fragment shader, but I'm not sure; it's been a while since I wrote shaders.
#ifdef GL_ES
precision mediump float;
precision mediump int;
#endif
uniform mat4 modelview;
uniform mat4 normalMatrix;
uniform int lightCount;
uniform vec4 lightPosition[8];
varying vec4 vertex; //was attribute, no such thing in frag shaders
varying vec3 normal; //was attribute
varying vec4 vertColor;
void main() {
vec3 vertexCamera = vec3(modelview * vertex);
vec3 transformedNormal = normalize(normalMatrix * vec4(normal, 0.0)).xyz; // w = 0 keeps the normal a direction (rotated, not translated); the original used mat3 * vec3
float intensity = 0.0;
for(int i = 0; i < 8; i++){ // GLSL ES can't loop to a non-constant bound, so loop to 8 and skip unused lights
if(i < lightCount)
{
vec3 direction = normalize(lightPosition[i].xyz - vertexCamera);
intensity += max(0.0, dot(direction, transformedNormal));
}
}
gl_FragColor = vec4(intensity, intensity, intensity, 1) * vertColor;
}
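For completeness, lightCount and lightPosition[8] still have to be fed from the application; in Processing that goes through PShader.set(). In plain OpenGL terms the upload would look roughly like this sketch, where prog and lightData are made-up names:
// Hypothetical example: upload 2 active lights out of the 8 available slots.
GLfloat lightData[8 * 4] = {0}; // x, y, z, w for each of the 8 lights
lightData[0] = 0.0f;  lightData[1] = 10.0f; lightData[2] = 5.0f; lightData[3] = 1.0f;
lightData[4] = -3.0f; lightData[5] = 2.0f;  lightData[6] = 0.0f; lightData[7] = 1.0f;
glUseProgram(prog);
glUniform1i(glGetUniformLocation(prog, "lightCount"), 2);
// One call fills all 8 vec4 elements of the lightPosition array:
glUniform4fv(glGetUniformLocation(prog, "lightPosition"), 8, lightData);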
I am using a fragment shader and a vertex shader at present, and everything works absolutely fine. I cannot get my geometry shader working. I am absolutely new to it; below is what I have tried.
I am using VBOs, lighting and textures along with some geometry, and it all works fine before adding the geometry shader. The only thing I have changed is the variable names, since I had to take the input into the geometry shader and pass it on as output. So I have appended 1 to the end of the names of the variables that go out from the geometry shader to the fragment shader.
I have also added the lines starting with # which were not there earlier. I am using GL_TRIANGLES to draw.
VertexShader
#version 330 compatibility
in vec4 position;
in vec4 color1;
in vec4 normal;
in vec2 texCoord;
uniform sampler2D Tex1;
uniform int use_texture;
out vec4 pcolor;
out vec3 N;
out vec3 L;
out vec3 R;
out vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec4 v_color; // vertex color
out vec4 pos_in_eye; //vertex position in eye space
out vec2 FtexCoord;
void main(){
gl_Position = local2clip * position;
N = normalize(vec3(normal_matrix * normal)); //v_normal
vec4 Lpos = world2eye * light_pos; //light pos. in eye
vec4 Vpos = local2eye * position; //pos_in_eye
L = normalize(vec3(Lpos - Vpos)); //light_vector
R = normalize(reflect(-L, N));
V = normalize(vec3(-Vpos)); //eye vector
vec3 halfv = normalize(L+V);
FtexCoord = texCoord;
//pcolor = color1;
}
This is my FragmentShader
#version 330 compatibility
uniform int use_texture;
in vec4 pcolor;
in vec3 N1;
in vec3 L1;
in vec3 R1;
in vec3 V1;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
uniform sampler2D Tex1;
in vec2 FtexCoord1;
void main() {
vec4 ambient = light_ambient * mat_ambient;
float NdotL;
if (dot(N1,L1) <0.0) NdotL = 0.0;
else NdotL = dot(N1, L1);
vec4 diffuse = light_diffuse * mat_diffuse * NdotL;
float RdotV;
RdotV = dot(R1, V1);
if (NdotL == 0.0) RdotV = 0.0;
if (RdotV <0.0) RdotV = 0.0;
vec4 specular = light_specular * mat_specular * pow(RdotV,mat_shine);
vec4 texcolor;
if( use_texture == 1 ) {
texcolor = texture2D(Tex1, FtexCoord1);
gl_FragColor = texcolor;
}
else
gl_FragColor = (diffuse + ambient + specular);
}
This is my GeometryShader
#version 330
layout (triangles) in;
layout (triangles) out;
layout (max_vertices = 3) out;
out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec4 v_color1; // vertex color
out vec4 pos_in_eye1; //vertex position in eye space
out vec2 FtexCoord1;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
void main(void)
{
int i;
N1=N;
L1=L;
R1=R;
V1=R;
FtexCoord1=FtexCoord;
v_color1=v_color;
pos_in_eye1=pos_in_eye;
for (i = 0; i < gl_in.length(); i++)
{
gl_Position = gl_in[i].gl_Position;
EmitVertex();
}
EndPrimitive();
}
I just want whatever was there earlier to be passed from the vertex shader to the fragment shader via the geometry shader, so that I can manipulate it later. Currently the screen is just black.
The core of your problem is that you didn't bother to check for compilation errors when you built your geometry shader. I know that because I can see several syntax errors in it. In particular:
in vec3 N;
in vec3 L;
in vec3 R;
in vec3 V;
in vec4 v_color; // vertex color
in vec4 pos_in_eye; //vertex position in eye space
in vec2 FtexCoord;
Geometry Shader inputs are always aggregated into arrays. Remember: a geometry shader operates on primitives, which are defined as a collection of one or more vertices. Each GS invocation therefore gets a set of per-vertex input values, one for each vertex in the primitive type defined by your layout in qualifier.
Notice how you loop over the number of vertices in a primitive and use gl_in[i] to get the input value for each vertex in the primitive. That's how you need to access all of your Geometry Shader inputs. And you need to write each one to its corresponding output variable, then call EmitVertex. All in that loop.
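As a side note, the compile check itself is only a few lines of host code; roughly something like this, where shader is whatever glCreateShader(GL_GEOMETRY_SHADER) returned (and <vector>/<iostream> are included):
GLint ok = GL_FALSE;
glGetShaderiv(shader, GL_COMPILE_STATUS, &ok);
if (ok != GL_TRUE) {
    GLint len = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &len);
    std::vector<char> log(len > 1 ? len : 1);
    glGetShaderInfoLog(shader, (GLsizei)log.size(), NULL, log.data());
    std::cerr << "geometry shader compile error:\n" << log.data() << std::endl;
}
// The same pattern with glGetProgramiv/glGetProgramInfoLog and GL_LINK_STATUS
// catches link-time errors such as mismatched interfaces between stages.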
I am using a fragment shader and a vertex shader at present, and everything works absolutely fine. I cannot get my geometry shader working properly. I am absolutely new to it; below is what I have tried.
I am using VBOs, lighting and textures along with some geometry, and it all works fine before adding the geometry shader. The only thing I have changed is the variable names, since I had to take the input into the geometry shader and pass it on as output. So I have appended 1 to the end of the names of the variables that go out from the geometry shader to the fragment shader.
I have also added the lines starting with # which were not there earlier. I am using GL_TRIANGLES to draw.
VertexShader
#version 330 compatibility
in vec4 position;
in vec4 color1;
in vec4 normal;
in vec2 texCoord;
out vec4 pcolor;
out vec3 N;
out vec3 L;
out vec3 R;
out vec3 V;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec2 FtexCoord;
void main(){
gl_Position = local2clip * position;
N = normalize(vec3(normal_matrix * normal)); //v_normal
vec4 Lpos = world2eye * light_pos; //light pos. in eye
vec4 Vpos = local2eye * position; //pos_in_eye
L = normalize(vec3(Lpos - Vpos)); //light_vector
R = normalize(reflect(-L, N));
V = normalize(vec3(-Vpos)); //eye vector
vec3 halfv = normalize(L+V);
FtexCoord = texCoord;
//pcolor = color1;
}
This is my FragmentShader
#version 330 compatibility
uniform int use_texture;
in vec4 pcolor;
in vec3 N1;
in vec3 L1;
in vec3 R1;
in vec3 V1;
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
uniform sampler2D Tex1;
in vec2 FtexCoord1;
void main() {
vec4 ambient = light_ambient * mat_ambient;
float NdotL;
if (dot(N1,L1) <0.0) NdotL = 0.0;
else NdotL = dot(N1, L1);
vec4 diffuse = light_diffuse * mat_diffuse * NdotL;
float RdotV;
RdotV = dot(R1, V1);
if (NdotL == 0.0) RdotV = 0.0;
if (RdotV <0.0) RdotV = 0.0;
vec4 specular = light_specular * mat_specular * pow(RdotV,mat_shine);
vec4 texcolor;
if( use_texture == 1 ) {
texcolor = texture2D(Tex1, FtexCoord1);
gl_FragColor = texcolor;
}
else
gl_FragColor = (diffuse + ambient + specular);
}
This is my Geometry Shader
#version 330
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;
out vec3 N1;
out vec3 L1;
out vec3 R1;
out vec3 V1;
in vec3 N[3];
in vec3 L[3];
in vec3 R[3];
in vec3 V[3];
uniform mat4 local2clip;
uniform mat4 local2eye;
uniform mat4 normal_matrix;
uniform mat4 world2eye;
uniform vec4 light_ambient;
uniform vec4 light_diffuse;
uniform vec4 light_specular;
uniform vec4 light_pos;
uniform vec4 mat_ambient;
uniform vec4 mat_diffuse;
uniform vec4 mat_specular;
uniform float mat_shine;
//varying vec3 v_normal; // vertex normal
out vec2 FtexCoord1;
in vec2 FtexCoord[3];
void main(void)
{
int i;
for (i = 0; i < gl_in.length(); i++)
{
N1=N[i];
L1=L[i];
R1=R[i];
V1=R[i];
FtexCoord1=FtexCoord[i];
gl_Position = gl_in[i].gl_Position;
EmitVertex();
}
EndPrimitive();
}
I just want whatever was there earlier to be passed from the vertex shader to the fragment shader via the geometry shader, so that I can manipulate it later. But the lighting does not show the same effect, as shown in the pics.
There was a small bug in the code. It should be V1=V[i]; instead of V1=R[i];
I have a problem with using multiple shaders on a single object.
This is my render code:
#include "MeshRenderer.h"
ForwardAmbient* shader1;
ForwardDirectional* shader2;
MeshRenderer::MeshRenderer(Obj& obj) :
meshObject(obj)
{
shader1 = new ForwardAmbient(vec3(1, 1, 1));
shader2 = new ForwardDirectional(vec3(1, 0, 0), vec3(1, 1, 1));
}
MeshRenderer::~MeshRenderer()
{
}
void MeshRenderer::Render(RenderingCore* rc)
{
//for (Shader* shader : meshObject.shaders)
//{
//}
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
/*
meshObject.shader->Bind();
meshObject.shader->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
*/
}
ambient light.vs:
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
}
ambient light.fs:
#version 120
varying vec2 texCoord0;
uniform vec3 ambientLight;
uniform float alpha;
uniform sampler2D sampler;
void main()
{
gl_FragColor = texture2D(sampler, texCoord0.xy) * vec4(ambientLight, alpha);
}
directional light.vs
#version 120
attribute vec3 position;
attribute vec2 texCoord;
attribute vec3 normal;
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform mat4 transform;
uniform mat4 projection;
void main()
{
gl_Position = (projection * transform) * vec4(position, 1);
texCoord0 = texCoord;
normal0 = (transform * vec4(normal, 0)).xyz;
worldPos0 = (transform * vec4(position, 1)).xyz;
}
directional light.fs
#version 120
varying vec2 texCoord0;
varying vec3 normal0;
varying vec3 worldPos0;
uniform vec3 color;
uniform float alpha;
uniform vec3 direction;
uniform float specularIntensity;
uniform float specularPower;
uniform vec3 eyePosition;
uniform sampler2D sampler;
vec4 calcLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
float diffuseFactor = dot(normal, -direction);
vec4 diffuseColor = vec4(0,0,0,0);
vec4 specularColor = vec4(0,0,0,0);
if(diffuseFactor > 0)
{
diffuseColor = vec4(color, 1.0) * diffuseFactor;
vec3 directionToEye = normalize(eyePosition - worldPos0);
vec3 reflectDirection = normalize(reflect(direction, normal));
float specularFactor = dot(directionToEye, reflectDirection);
specularFactor = pow(specularFactor, specularPower);
if(specularFactor > 0)
{
specularColor = vec4(color, 1.0) * specularIntensity * specularFactor;
}
}
return diffuseColor + specularColor;
}
vec4 calcDirectionalLight(vec3 color, float alpha, vec3 direction, vec3 normal)
{
return calcLight(color, alpha, -direction, normal);
}
void main()
{
gl_FragColor = texture2D(sampler, texCoord0.xy) * calcDirectionalLight(color, 1, direction, normalize(normal0));
}
Here is the result:
http://imgur.com/Bawny2P
Only the ambient light pass is rendered; the directional light is not.
Your problem is this:
shader1->Bind();
shader1->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
OpenGL doesn't know what "objects" are. It just draws points, lines and triangles, one at a time. Overlap in depth is sorted out by the depth buffer. When you issue exactly the same draw call (meshObject.mesh->Render) with the same vertex setup and depth testing enabled, one of the two draw calls will simply win over the other.
Also, and more importantly, draw calls don't "stack". You cannot combine shaders simply by drawing the same thing multiple times; it may sort of work for additive passes if blending and the depth test are set up for it, but that's barking up the wrong tree: instead of saving work, you're duplicating the amount of work to be done.
What you should do instead is merge the two shaders into a single one and draw the geometry only once, with the merged shader.
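For reference, if you did insist on the two-pass additive route mentioned above, the second pass would at least need blending enabled and a depth test that accepts equal depths; a rough sketch wrapped around your existing calls:
// Sketch only; the single merged shader is still the cleaner solution.
glDepthFunc(GL_LEQUAL);        // accept fragments at the same depth as pass 1
glEnable(GL_BLEND);
glBlendFunc(GL_ONE, GL_ONE);   // add the directional light on top of the ambient pass
shader2->Bind();
shader2->UpdateShader(rc, transform, meshObject.material);
meshObject.material->GetTexture()->Bind(0);
meshObject.mesh->Render();
glDisable(GL_BLEND);
glDepthFunc(GL_LESS);          // restore defaults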
Hi, I'm new to GLSL and I'm having a few problems.
I'm trying to create a pair of GLSL shaders that use either color or a texture, but I must be doing something wrong.
The problem is that if I set uUseTexture to 0 (which should indicate color), it doesn't work (the object is not colored). I know the coloring code works on its own; any hints why it does not work with the if statement?
Here is the code:
// Fragment
precision mediump float;
uniform int uUseTexture;
uniform sampler2D uSampler;
varying vec4 vColor;
varying vec2 vTextureCoord;
void main(void) {
if(uUseTexture == 1) {
gl_FragColor = texture2D(uSampler, vec2(vTextureCoord.s, vTextureCoord.t));
} else {
gl_FragColor = vColor;
}
}
// Vertex
attribute vec3 aVertexPosition;
attribute vec4 aVertexColor;
attribute vec2 aTextureCoord;
uniform int uUseTexture;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec4 vColor;
varying vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
if(uUseTexture == 1) {
vTextureCoord = aTextureCoord;
} else {
vColor = aVertexColor;
}
}
Nothing springs to mind immediately glancing over your code, but I'd like to take a moment and point out that this use case can be covered without needing an if statement. For example, let's treat uUseTexture as a float instead of an int (you could cast it in the shader but this is more interesting):
// Vertex
attribute vec3 aVertexPosition;
attribute vec4 aVertexColor;
attribute vec2 aTextureCoord;
uniform mat4 uMVMatrix;
uniform mat4 uPMatrix;
varying vec4 vColor;
varying vec2 vTextureCoord;
void main(void) {
gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
// It may actually be faster to just assign both of these anyway
vTextureCoord = aTextureCoord;
vColor = aVertexColor;
}
// Fragment
precision mediump float; // WebGL fragment shaders need a default float precision
uniform float uUseTexture;
uniform sampler2D uSampler;
varying vec4 vColor;
varying vec2 vTextureCoord;
void main(void) {
// vTextureCoord is already a vec2, BTW
vec4 texColor = texture2D(uSampler, vTextureCoord) * uUseTexture;
vec4 vertColor = vColor * (1.0 - uUseTexture);
gl_FragColor = texColor + vertColor;
}
Now uUseTexture simply acts as a modulator for how much of each color source you want to use. And it's more flexible in that you could set it to 0.5 and get half texture/half vertex color too!
The thing that may surprise you is that there's a good likelihood that this is what the shader compiler is doing behind the scenes anyway when you use an if statement like that. It's typically more efficient for the hardware that way.
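One practical note: since uUseTexture is now a float, the application side has to upload it with the float variant of the uniform call (gl.uniform1f in WebGL). In desktop GL terms, with prog standing in for your linked program, it would look like:
GLint loc = glGetUniformLocation(prog, "uUseTexture");
glUniform1f(loc, 1.0f); // texture only
glUniform1f(loc, 0.0f); // vertex color only
glUniform1f(loc, 0.5f); // half texture, half vertex color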