QOpenGLWidget cannot use more than one sampler2D - c++

I've created a very basic shader:
// Minimal pass-through fragment shader: samples textureSampler at the
// interpolated texture coordinate and writes the texel to the output.
// Fix: texture2D() was deprecated in GLSL 1.30 and removed from the core
// profile, so a strict "#version 150" core compiler (Mesa, macOS) rejects
// it; the overloaded texture() is the correct call and behaves identically.
static const char *frag_showImage =
"#version 150\n"
"uniform sampler2D textureSampler;\n"
"in mediump vec2 texc;\n"
"out highp vec4 fragColor;\n"
"void main() {\n"
" fragColor = texture(textureSampler, texc.st);\n"
"}\n";
And it works as expected, now a bit more complex one:
"#version 150\n"
"uniform sampler2D textureSampler;\n"
"uniform sampler2D maskSampler;\n"
"in mediump vec2 texc;\n"
"out highp vec4 fragColor;\n"
"void main() {\n"
" fragColor = texture2D(textureSampler, texc.st);\n"
" fragColor.x = texture2D(maskSampler, texc.st).x;\n"
" fragColor.y =0;\n"
"}\n";
It doesn't work, but it produces no warnings or errors:
in both cases I bind the first texture as:
// Select texture unit 0, bind the base texture to it, and point the
// sampler uniform at unit 0 (the uniform value is the unit index,
// not a texture object id).
QOpenGLFunctions *f =this->context()->functions();
f->glActiveTexture(GL_TEXTURE0);
glBaseTexture->bind();
m_program->setUniformValue("textureSampler", 0);
and the second texture is bound as:
// Same pattern on unit 1: activate the unit, bind the mask texture,
// and point maskSampler at unit 1.
f->glActiveTexture(GL_TEXTURE1);
glMaskTexture->bind();
m_program->setUniformValue("maskSampler", 1);
Notice that if I bind glMaskTexture for the first shader it works OK, so the problem is not with that QOpenGLTexture.
Any idea? Thank you in advance!

Goblins: after renaming maskSampler to another name it works OK. I have no idea why this is happening, since "maskSampler" is not used in any other part of the code.

Related

Can you write OpenGL shader in different file and later link it to the program?

Can you write an OpenGL shader in a different file and later link it to the program? And if it's possible, how? Writing OpenGL shaders in strings makes my code messy.
Here is example code for shaders:
// Vertex shader: rotates each vertex in the XY plane by the angle encoded
// in angleValues (cos, sin) and forwards the per-vertex colour downstream.
// Raw string literal; content is byte-identical to the escaped original.
const char* vertexShaderSource = R"(#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aColor;

out vec3 ourColor;
uniform vec2 angleValues;

void main()
{
gl_Position = vec4(aPos.x * angleValues.x - aPos.y * angleValues.y, aPos.y * angleValues.x + aPos.x * angleValues.y , aPos.z, 1.0);
ourColor = aColor;
}
)";
// Fragment shader: outputs the interpolated vertex colour at full opacity.
// Raw string literal; content is byte-identical to the escaped original.
const char* fragmentShaderSource = R"(#version 330 core
out vec4 FragColor;
in vec3 ourColor;

void main()
{
FragColor = vec4(ourColor, 1.0);
}
)";
Yes, you can have files like my_shader.vs or my_fragment.fs and link them like in this Shader class
Just initialize it like this:
shader = Shader("./shaders/my_shader.vs", "./shaders/my_fragment.fs");

Qt and OpenGL: Fragment shader won't compile when using addShaderFromSourceFile instead of addShaderFromSourceCode

I am using QOpenGLWidget to render some CAD meshes. I started with an example that had the shader source code written directly into the code in the following way:
Vertex shader
// Core-profile vertex shader: transforms the vertex by the model-view and
// projection matrices and hands the eye-space position and transformed
// normal to the fragment stage. Raw string literal; content is
// byte-identical to the escaped original.
static const char *vertexShaderSourceCore = R"(#version 330
in vec4 vertex;
in vec3 normal;
out vec3 vert;
out vec3 vertNormal;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
uniform mat3 normalMatrix;
void main() {
 vert = vertex.xyz;
 vertNormal = normalMatrix * normal;
 gl_Position = projMatrix * mvMatrix * vertex;
}
)";
Fragment shader
// Core-profile fragment shader: simple Lambertian shading — 20% ambient
// plus 80% N·L diffuse of a fixed olive colour. Raw string literal;
// content is byte-identical to the escaped original.
static const char *fragmentShaderSourceCore = R"(#version 330
in highp vec3 vert;
in highp vec3 vertNormal;
out highp vec4 fragColor;
uniform highp vec3 lightPos;
void main() {
 highp vec3 L = normalize(lightPos - vert);
 highp float NL = max(dot(normalize(vertNormal), L), 0.0);
 highp vec3 color = vec3(0.39, 0.4, 0.0);
 highp vec3 col = clamp(color * 0.2 + color * 0.8 * NL, 0.0, 1.0);
 fragColor = vec4(col, 1.0);
}
)";
I load this code into my QOpenGLShaderProgram with the function addShaderFromSourceCode. With this, everything works as expected.
Now I wanted to move the shader source code to a separate file in order to make the code cleaner. I created a .qrc file and put in the shaders in the following way:
Vertex Shader
#version 330
in vec4 vertex;
in vec3 normal;
out vec3 vert;
out vec3 vertNormal;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
uniform mat3 normalMatrix;
void main()
{
vert = vertex.xyz;
vertNormal = normalMatrix * normal;
gl_Position = projMatrix * mvMatrix * vertex;
}
Fragment shader
in highp vec3 vert;
in highp vec3 vertNormal;
out highp vec4 fragColor;
uniform highp vec3 lightPos;
void main() {
highp vec3 L = normalize(lightPos - vert);
highp float NL = max(dot(normalize(vertNormal), L), 0.0);
highp vec3 color = vec3(0.39, 0.4, 0.0);
highp vec3 col = clamp(color * 0.2 + color * 0.8 * NL, 0.0, 1.0);
fragColor = vec4(col, 1.0);
}
I then wanted to use QOpenGLShaderProgram::addShaderFromSourceFile to load the shader code from these files.
The vertex shader loads without issues, but when importing the fragment shader I get the following error message:
QOpenGLShader::compile(Fragment): ERROR: 4:1: ':' : syntax error syntax error
*** Problematic Fragment shader source code ***
#version 110
#ifdef GL_KHR_blend_equation_advanced
#extension GL_ARB_fragment_coord_conventions : enable
#extension GL_KHR_blend_equation_advanced : enable
#endif
#define lowp
#define mediump
#define highp
#line 1
:/globalCore.frag
***
which I do not understand. I don't think its a compatibility issue since I have two additional shaders if core mode is not enabled.
Can someone help me understand where this ominous "syntax error" is coming from? I don't have a colon in my shader code so I am at a loss here.

why some triangles are becoming black after I add lighting? [closed]

Closed. This question needs debugging details. It is not currently accepting answers.
Edit the question to include desired behavior, a specific problem or error, and the shortest code necessary to reproduce the problem. This will help others answer the question.
Closed 1 year ago.
Improve this question
I am trying to add specular lighting to my OpenGL ES program, which loads a 3D model. It's working normally, but whenever I add lighting this happens:
some triangles are becoming black and some are staying white.
here is my Vertex and fragment shader code:
"attribute vec4 position;\n"
"attribute vec4 normal;\n"
"attribute vec4 color;\n"
"attribute vec2 texCord;\n"
"varying vec4 vcolor;\n"
"varying vec2 vtexCord;\n"
"varying vec3 s_normal;\n"
"varying vec3 toLightv;\n"
"varying vec3 toCameraV;\n"
"uniform vec3 light_pos;\n"
"uniform mat4 MVP;\n"
"uniform mat4 view;"
"uniform mat4 transform;\n"
"void main()\n"
"{\n"
"gl_Position = MVP * vec4(position.xyz, 1.0);\n"
"vcolor = color;\n"
"vtexCord = texCord;\n"
"s_normal = (transform * vec4(normal.xyz,0.0)).xyz;\n"
"toLightv = light_pos - (MVP * vec4(position.xyz, 1.0)).xyz;\n"
"toCameraV = (view * vec4(0.0,0.0,0.0,1.0)).xyz - (MVP * vec4(position.xyz, 1.0)).xyz;\n"
"}";
`
"precision mediump float;\n"
"varying vec4 vcolor;\n"
"varying vec2 vtexCord;\n"
"varying vec3 s_normal;\n"
"varying vec3 toLightv;\n"
"varying vec3 toCameraV;\n"
"uniform sampler2D s_texr;\n"
"uniform vec3 light_col;\n"
"void main()\n"
"{\n"
// "gl_FragColor = vec4(1.0,0.0,1.0,1.0);\n"
//"gl_FragColor = vec4 (vcolor.xyz,1.0);\n"
"vec3 unitCV = normalize(toCameraV);\n"
"vec3 unitNL = normalize(s_normal);\n"
"vec3 unitLV = normalize(toLightv);\n"
"vec3 lightComing = -unitLV;\n"
"vec3 reflectedL = reflect(lightComing,unitNL);\n"
"float specularFactor = dot(reflectedL,toCameraV);\n"
"specularFactor = max(specularFactor,0.0);\n"
"float dampFactor = pow(specularFactor,1.0);\n"
"vec3 Specular= dampFactor * vec3(1.0,1.0,1.0);\n"
"float nDotl = dot(unitNL,unitLV);"
"vec3 diffuse =max(nDotl,0.1) * vec3(1.0,1.0,1.0);"
// diffuse = diffuse * (1.0 / (1.0 + (0.00000025 * distance * distance)));
"gl_FragColor =vec4(diffuse.xyz,1.0)* texture2D(s_texr, vtexCord)+vec4(Specular.xyz,1.0);"
"};"
I enabled depth testing and the problem was solved:
glEnable(GL_DEPTH_TEST);

Why comment glBindFragDataLocation, the GL also works correctly?

// Vertex shader: passes 2D positions through unchanged and forwards the
// per-vertex colour. Raw string literal; content is byte-identical to the
// escaped original, including the explicit embedded NUL ("\0") at the end.
const GLchar* vertexSource1 = R"(#version 330 core
layout (location = 0) in vec2 position;
layout (location = 1) in vec3 color;
out vec3 Color;
void main()
{
gl_Position = vec4(position, 0.0, 1.0);
Color = color;
})" "\0";
// Fragment shader: writes the interpolated colour at full opacity.
// Raw string literal; content (including the single leading space on each
// body line) is byte-identical to the escaped original.
const GLchar* fragmentSource1 = R"(#version 330 core
 in vec3 Color;
 out vec4 outColor;
 void main()
 {
 outColor = vec4(Color, 1.0);
 }
)";
// Create the program object and attach the previously compiled shaders.
GLuint shaderProgram1 = glCreateProgram();
glAttachShader(shaderProgram1, vertexShader1);
glAttachShader(shaderProgram1, fragmentShader1);
// NOTE(review): if re-enabled, this call should name the fragment shader's
// *output* variable ("outColor"), not its input ("Color") — binding must
// also happen before glLinkProgram to take effect.
// glBindFragDataLocation(shaderProgram1, 0, "Color");
glLinkProgram(shaderProgram1);
Whether I add glBindFragDataLocation or not, the GL works correctly, Why?
Because you're "lucky". The OpenGL specification provides no guarantees about how fragment shader output locations are assigned if you don't assign them. It only says that each one will have a separate location; what locations those are is up to the implementation.
However, considering the sheer volume of code that writes to a single output variable without explicitly assigning it to a location, it's highly unlikely that an OpenGL implementation would ever assign the first FS output location to anything other than 0. So while it isn't a spec guarantee, at this point, it is a de-facto requirement of implementations.
Note: That doesn't mean you shouldn't assign that location manually. It's always best to be on the safe and explicit side.
FYI: layout(location) works for fragment shader outputs too. So you should use that if you're using it on vertex attributes. Then you don't have to worry about doing it from code.

is it possible for a shader to interfere with this lighting?

I'm working on OpenGL ES and I have 2 shaders interfering with each other. I mean, if I use the second shader, the lighting shader stops working as it normally does, as if the normals in the VBO get corrupted. It only happens when I add this shader:
"#ifdef GL_ES \n" //00
"precision mediump float; \n" //01
"#else \n" //02
"#version 100 \n" //03
"precision mediump float; \n" //04
"#endif
\n" //05
"//ProgressBarShader\n"
"uniform float iGlobalTime; \n" //06
"varying vec34 v_position; \n"
"varying vec2 v_texCoord; \n" //07
"uniform sampler2D s_texture; \n" //11
"void main() {\n" //1
" float igt=((iGlobalTime*15.0)+(sin(iGlobalTime)*25.0)); \n"
" if (gl_FragCoord.x>(1024.0-igt)) \n"
" gl_FragColor = texture2D( s_texture, v_texCoord ); "
"}\n";
The other one is a Per Fragment Shader took from this web : http://www.lighthouse3d.com/tutorials/glsl-tutorial/point-light-per-pixel/
but modified for opengl es 2.0 (and my inputs)
"#ifdef GL_ES \n"
"precision mediump float; \n"
"#else \n"
"#version 100 \n"
"precision mediump float; \n"
"#endif \n"
"uniform float iGlobalTime; \n"
"varying vec2 v_texCoord; \n"
"varying vec4 v_normal; \n"
"varying vec4 v_position; \n"
"uniform sampler2D s_texture; \n"
"uniform mat4 MVPMat; \n"
"uniform mat3 iMVPMat; \n"
"uniform vec2 iResolution; \n"
"uniform vec3 iMouse; \n"
"uniform vec4 objectMaterialEmission; // Ecm \n"
"uniform vec4 objectMaterialAmbient; // Acm \n"
"uniform vec4 objectMaterialDiffuse; // Dcm \n"
"uniform vec4 objectMaterialSpecular; // Scm \n"
"uniform float objectMaterialGlossiness; // Gcm \n"
"uniform float objectMaterialShininess; // Srm \n"
"uniform float lightStrength; \n"
"uniform vec4 lightAmbient; \n"
"uniform vec4 lightDiffuse; \n"
"uniform vec4 lightSpecular; \n"
"uniform vec3 lightPosition; \n"
"uniform vec3 lightSpotDirection; \n"
"uniform float lightSpotExponent; \n"
"uniform float lightSpotCutoff; // (range: [0.0,90.0], 180.0)\n"
"uniform float lightSpotCosCutoff; // (range: [1.0,0.0],-1.0)\n"
"uniform float lightConstantAttenuation; \n"
"uniform float lightLinearAttenuation; \n"
"uniform float lightQuadraticAttenuation;\n"
"uniform bool lightOn; \n"
"varying vec3 N;\n"
"varying vec3 v;\n"
"void main (void) \n"
"{ \n"
" vec3 L = normalize(-lightPosition.xyz - v); \n"
" vec3 E = normalize(-v); // we are in Eye Coordinates, so EyePos is (0,0,0) \n"
" vec3 R = normalize(-reflect(L,N)); \n"
" //calculate Ambient Term: \n"
" vec4 Iamb = lightAmbient; \n"
" // write Total Color: \n"
" vec4 textureColor = texture2D( s_texture, v_texCoord ); \n"
" vec4 color =textureColor; \n"
" //calculate Diffuse Term: \n"
" vec4 Idiff = lightDiffuse * max(dot(N,L), 0.0);\n"
" Idiff = clamp(Idiff, 0.0, 1.0); \n"
" float distance = length(lightPosition-v); \n"
" float attenuation = lightStrength/(lightConstantAttenuation+(distance*lightLinearAttenuation));\n"
" gl_FragColor = clamp( color *(Idiff+Iamb)*attenuation,0.0,1.0); \n"
" gl_FragColor.a = 1.0;"
"}\n";
If I use the progress bar shader (the first one), my lighting disappears and only some faces get lit. Any other shader works fine; it only gets disrupted when I add the progress bar shader.
I researched the web, but never saw anything about shaders interfering with each other. Is it possible? What should I do?
In the end, it was a failure in the data binding: the binding required the shader to be active (glUseProgram) the first time. The binding was correct, but when the lighting shader came, the active program was the wrong one, and the binding went to the other shader.
Sorry for the misleading question.