I'm working with OpenGL 2.1 and have some problems with the alpha value of gl_FragColor.
The whole fragment shader:
uniform sampler2D texture_0;
uniform vec3 uColor;
varying vec2 varTexCoords;
void main(void)
{
//vec4 col = texture2D(texture_0, varTexCoords);
vec4 col = vec4(0.0, 0.0, 0.0, 0.5);
gl_FragColor = col;
}
Can someone explain to me why:
Works:
vec4 col = texture2D(texture_0, varTexCoords);
//vec4 col = vec4(0.0, 0.0, 0.0, 0.5);
gl_FragColor = col;
Doesn't work:
//vec4 col = texture2D(texture_0, varTexCoords);
vec4 col = vec4(0.0, 0.0, 0.0, 0.5);
gl_FragColor = col;
Works:
vec4 col = texture2D(texture_0, varTexCoords);
col.rgb = uColor;
//col.a = 0.5;
gl_FragColor = col;
Also works:
vec4 col = texture2D(texture_0, varTexCoords);
col.rgb = uColor;
col.a *= 0.5;
gl_FragColor = col;
Doesn't work:
vec4 col = texture2D(texture_0, varTexCoords);
col.rgb = uColor;
col.a = 0.5;
gl_FragColor = col;
And this one doesn't work even though many examples seem to use it:
gl_FragColor = vec4(0.0, 0.0, 0.0, 1.0);
The error occurs at this point in the code:
glEnableVertexAttribArray(textureCoords);
CHECK_GL_ERROR("glEnableVertexAttribArrayCheck");
All code related to the shader:
inline void Renderer::renderText(float x, float y, string msg) {
mat4 proj;
Matrix::projection2D(proj,
(float) nScreenWidth_, (float) nScreenHeight_, 0.0f);
mat4 res, restmp;
mat4 pos;
mat4 rot;
mat4 scale;
//Vector3D p(72.0f, 88.0f, 1.0f);
//Vector3D p(20.0f, 20, 1.0f);
Vector3D r(0.0f, 0.0f, 0.0f);
Vector3D s(1.0f, nScreenWidth_ / nScreenHeight_, 1.0f);
//Matrix::translate(pos, p.getX(), p.getY(), p.getZ());
//Matrix::rotateZ(rot, r.getZ());
float widthMod = nScreenWidth_ / 100.0f;
float heightMod = nScreenHeight_ / 100.0f;
Matrix::translate(pos, x * widthMod, y * heightMod, 1.0f);
Matrix::rotateZ(rot, r.getZ());
//Matrix::scale(scale, s.getX() * widthMod, s.getY() * heightMod, 1.0f);
Matrix::scale(scale, 16.0f, 16.0f, 1.0f);
Matrix::multiply(proj, pos, res);
Matrix::multiply(res, rot, restmp);
Matrix::multiply(restmp, scale, res);
// Select shader program to use.
int shaderId = features_->getText()->getShaderId();
glUseProgram(shaderId);
CHECK_GL_ERROR("glUseProgram");
int matrix = glGetUniformLocation(shaderId, "uWVP");
int color = glGetUniformLocation(shaderId, "uColor");
int texture = glGetUniformLocation(shaderId, "texture_0");
CHECK_GL_ERROR("glGetUniformLocation");
int textureCoords = glGetAttribLocation(shaderId, "attrTexCoords");
int vertices = glGetAttribLocation(shaderId, "attrPos");
CHECK_GL_ERROR("glGetAttribLocation");
// Specify WVP matrix.
glUniformMatrix4fv(matrix, 1, false, res);
CHECK_GL_ERROR("glUniformMatrix4fv");
// Bind the texture.
glActiveTexture(GL_TEXTURE0);
CHECK_GL_ERROR("glActiveTexture");
glBindTexture(GL_TEXTURE_2D, features_->getText()->getFontMapId());
CHECK_GL_ERROR("glBindTexture");
glUniform1i(texture, 0);
CHECK_GL_ERROR("glUniform1i");
glEnableVertexAttribArray(vertices);
CHECK_GL_ERROR("glEnableVertexAttribArray");
glBindBuffer(GL_ARRAY_BUFFER, 0);
CHECK_GL_ERROR("glBindBuffer");
glEnable(GL_BLEND);
CHECK_GL_ERROR("glEnable");
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
CHECK_GL_ERROR("glBlendFunc");
//string text = output_;
for (unsigned int i = 0; i < msg.length(); i++) {
unsigned short l = static_cast<unsigned short>(msg[i]) - 32;
mat4 delta, resmove;
Matrix::translate(delta, 1.6f, 0.0f, 0.0f);
Matrix::multiply(res, delta, resmove);
Matrix::copy(resmove, res);
glUniformMatrix4fv(matrix, 1, false, res);
CHECK_GL_ERROR("glUniformMatrix4fv");
float col[] = {0.0f, 1.0f, 0.0f};
glUniform3fv(color, 1, col);
CHECK_GL_ERROR("glUniform3fv");
glVertexAttribPointer(vertices, 3, GL_FLOAT, GL_FALSE, 0,
features_->getText()->vertices_);
CHECK_GL_ERROR("glVertexAttribPointer");
glEnableVertexAttribArray(textureCoords);
CHECK_GL_ERROR("glEnableVertexAttribArrayCheck");
glVertexAttribPointer(textureCoords, 2, GL_FLOAT, GL_FALSE, 0,
features_->getText()->getSymbol(l));
CHECK_GL_ERROR("glVertexAttribPointer");
glDrawArrays(GL_TRIANGLES, 0, 18 / 3);
CHECK_GL_ERROR("glDrawArrays");
}
glDisable(GL_BLEND);
CHECK_GL_ERROR("glDisable");
}
The error is GL_INVALID_VALUE, and it only occurs when this code executes, not when the shader is compiled and linked.
This is probably what is happening:
(I say "compiler" here, but it's probably the linker that does the actual purging)
The shader compiler drops this one:
varying vec2 varTexCoords;
If the compiler determines that a variable is not used, it will be discarded.
Your last example is a good illustration:
vec4 col = texture2D(texture_0, varTexCoords);
col.rgb = uColor;
col.a = 0.5;
gl_FragColor = col;
The compiler understands that the original value in col is overwritten by the uColor uniform and the 0.5 constant. The texture read is dropped, so the varying is also dropped.
Then your attrTexCoords attribute will also most likely be dropped, so your textureCoords variable containing the attrib location is -1.
Here on the other hand, the compiler cannot remove the texture read because col.bg will contain values from the texture.
vec4 col = texture2D(texture_0, varTexCoords);
col.r = uColor.r;
col.a = 0.5;
gl_FragColor = col;
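Until the texture read is back in the shader, one way to make the error go away is to guard against optimized-out locations on the C++ side. A minimal sketch, reusing the names from renderText above (not your exact code):
// glGetAttribLocation returns -1 for an attribute that was optimized away;
// passing that to glEnableVertexAttribArray (it becomes a huge GLuint) raises
// GL_INVALID_VALUE, so skip the attribute instead.
int textureCoords = glGetAttribLocation(shaderId, "attrTexCoords");
if (textureCoords >= 0) {
    glEnableVertexAttribArray(textureCoords);
    glVertexAttribPointer(textureCoords, 2, GL_FLOAT, GL_FALSE, 0,
                          features_->getText()->getSymbol(l));
}
// Uniform locations of -1 are harmless: glUniform* calls simply ignore them.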
Related
I'm trying to change the position of a triangle without using a transformation function, by changing only the x position each time.
This is my code in the main while loop:
float MyPoints[] = { 0.1 , 0.2, 0.3, 0.4, 0.5 , 0.6, 0.7, 0.8, 0.9};
int offset = (-1, 1);
for (int i = 0; i < sizeof(MyPoints); i++) {
offset += MyPoints[i];
ourShader.Use();
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glBindVertexArray(0);// unbind
}
and this is in the shader:
out vec3 ourColor;
out vec2 TexCoord;
uniform vec4 offset;
void main()
{
gl_Position = vec4(position.x + offset, position.y, position.z, 1.0f);
ourColor = color;
TexCoord = texCoord;
}
Edit
This is my code in the main while loop:
float offset = 1.0f;
float step = 0.001f; //move
int i=0;
// Loop until window closed (Game loop)
while (!glfwWindowShouldClose(mainWindow))
{
// Get + Handle user input events
glfwPollEvents();
//Render
// Clear the colorbuffer
glClearColor(0.0f, 0.1f, 0.2f, 1.0f);
//glPointSize(400.0f);
glClear(GL_COLOR_BUFFER_BIT);
// Call Shader Program
//Rendering the first triangle
GLuint program =ourShader.Program ; // program object from "ourShader"
GLint offset_loc = glGetUniformLocation(program, "offset");
float MyPoints[] = { -0.1 , -0.2,-0.3,-0.4,-0.5 ,-0.6,-0.7,-0.8,-0.9 };
int noPoints = sizeof(MyPoints) / sizeof(float);
ourShader.Use();
for (int i = 0; i < noPoints; i++) {
glUniform1f(offset_loc, MyPoints[i] + offset);
}
offset += step;
if (MyPoints[i] + offset >= 1.0f || MyPoints[i] + offset <= -1.0f)
step *= -1.0f;
//update uniform data
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
glfwSwapBuffers(mainWindow);
glBindVertexArray(0);// unbind
}
and this is in the shader:
out vec3 ourColor;
out vec2 TexCoord;
uniform float offset;
void main()
{
gl_Position = vec4(position.x + offset, position.y, position.z, 1.0f);
ourColor = color;
TexCoord = texCoord;
}
The edited code only moves the triangle from (-1.0) to about the middle of the window instead of to the end.
First of all, the number of elements in the array is sizeof(MyPoints) / sizeof(float).
The type of the uniform variable offset has to be float:
uniform float offset;
You have to get the location of the uniform variable offset with glGetUniformLocation and set the value of the uniform with e.g. glUniform1f:
GLuint program = ourShader.Program; // program object from "ourShader"
GLint offset_loc = glGetUniformLocation(program, "offset");
float MyPoints[] = { 0.1 , 0.2, 0.3, 0.4, 0.5 , 0.6, 0.7, 0.8, 0.9};
int noPoints = sizeof(MyPoints) / sizeof(float);
// bind vertex array
glBindVertexArray(VAO);
// install program
ourShader.Use();
float offset = -1.0f;
for (int i = 0; i < noPoints; i++) {
// set value of the uniform (after program is installed)
offset += MyPoints[i];
glUniform1f(offset_loc, offset);
// draw one triangle
glDrawArrays(GL_TRIANGLES, 0, 3);
}
glBindVertexArray(0);
If you want to make the triangles move, then you have to change the offset of each individual triangle in every frame, e.g.:
float offset = 0.0f;
float step = 0.01f;
while (!glfwWindowShouldClose(mainWindow))
{
// [...]
ourShader.Use();
glUniform1f(offset_loc, offset);
glDrawArrays(GL_TRIANGLES, 0, 3);
// [...]
// change offset
offset += step;
if (offset >= 1.0f || offset <= -1.0f)
step *= -1.0f; // reverse direction
}
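Putting the two pieces together, here is a minimal sketch (assuming the same VAO, ourShader, offset_loc, MyPoints and noPoints as above) that draws one triangle per point in every frame and animates a shared offset:
float animOffset = 0.0f; // added to every point, animated once per frame
float step = 0.01f;
while (!glfwWindowShouldClose(mainWindow))
{
    glfwPollEvents();
    glClearColor(0.0f, 0.1f, 0.2f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    ourShader.Use();
    glBindVertexArray(VAO);
    for (int i = 0; i < noPoints; i++)
    {
        // set the uniform, then draw, so each triangle gets its own offset
        glUniform1f(offset_loc, MyPoints[i] + animOffset);
        glDrawArrays(GL_TRIANGLES, 0, 3);
    }
    glBindVertexArray(0);
    // change the shared offset and reverse direction near the window edges
    animOffset += step;
    if (animOffset >= 1.0f || animOffset <= -1.0f)
        step *= -1.0f;
    glfwSwapBuffers(mainWindow);
}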
I have recently used the FreeType library to render text and followed a guide on how to display text in 2D.
I tried to extend the code to support 3D text rendering, but I started having OpenGL-related problems with it.
At certain angles the text image starts to fade, and the whole axis on which the text is located starts to inherit its colour.
Screenshots: Fading; Black Slice.
All the related code is:
Drawing function (inherited from learnopengl.com)
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
// Activate corresponding render state
glActiveTexture(GL_TEXTURE0);
glBindVertexArray(VAO);
glEnableVertexAttribArray(0);
scale /= RESOLUTION;
vec2 start(x, y);
// Iterate through all characters
std::string::const_iterator c;
for (c = text.begin(); c != text.end(); c++)
{
Character ch = Characters[*c];
if (*c == '\r' || (x-start.x > xMax && xMax != 0.0f))
{
y += ((ch.Advance >> 6) + 16) * scale ;
x = start.x;
continue;
}
GLfloat xpos = x + ch.Bearing.x * scale;
GLfloat ypos = y + (ch.Size.y - ch.Bearing.y) * scale;
GLfloat w = ch.Size.x * scale;
GLfloat h = ch.Size.y * scale;
// Update VBO for each character
GLfloat vertices[6][4] = {
{ xpos, ypos - h, 0.0, 0.0 },
{ xpos, ypos, 0.0, 1.0 },
{ xpos + w, ypos, 1.0, 1.0 },
{ xpos, ypos - h, 0.0, 0.0 },
{ xpos + w, ypos, 1.0, 1.0 },
{ xpos + w, ypos - h, 1.0, 0.0 }
};
// Render glyph texture over quad
glBindTexture(GL_TEXTURE_2D, ch.TextureID);
// Update content of VBO memory
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferSubData(GL_ARRAY_BUFFER, 0, sizeof(vertices), vertices);
// Render quad
glDrawArrays(GL_TRIANGLES, 0, 6);
glBindBuffer(GL_ARRAY_BUFFER, 0);
// Now advance cursors for next glyph (note that advance is number of 1/64 pixels)
x += (ch.Advance >> 6) * scale; // Bitshift by 6 to get value in pixels (2^6 = 64)
}
glBindVertexArray(0);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_BLEND);
Shader uniform initialization
ShaderBuilder::LoadShader(shader)->Add_mat4("projection", projection).Add_mat4("view", view).
Add_mat4("model", model).Add_vec3("textColor", color).Add_texture("text", 0);
Vertex Shader
#version 400 core
layout (location = 0) in vec4 vertex; // <vec2 pos, vec2 tex>
out vec2 TexCoords;
uniform mat4 projection;
uniform mat4 view;
uniform mat4 model;
void main()
{
vec2 vertexGL = (vertex.xy - vec2(400,300)) / vec2(400,300);
vertexGL = vertex.xy;
vec4 position = projection * view * model * vec4(vertexGL.xy, 0.0, 1.0);
gl_Position = position / position.w;
TexCoords = vertex.zw;
}
Fragment Shader
#version 400 core
in vec2 TexCoords;
out vec4 color;
uniform sampler2D text;
uniform vec3 textColor;
void main()
{
vec4 sampled = vec4(1.0, 1.0, 1.0, texture(text, TexCoords).r);
color = vec4(textColor, 1.0) * sampled;
//color = vec4(1);
}
I finally found the mistake: for some unknown reason I thought normalizing my vertex coordinates after applying the matrix multiplication would be good practice.
Apparently it isn't.
vec4 position = projection * view * model * vec4(vertexGL.xy, 0.0, 1.0);
gl_Position = position;// / position.w;
As the comment shows, this removed the mistake.
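For reference, a short sketch of why the divide hurts, with the pitfall spelled out in comments:
// Output clip coordinates and let the hardware do the perspective divide
// after clipping:
vec4 position = projection * view * model * vec4(vertexGL.xy, 0.0, 1.0);
gl_Position = position;
// Dividing by w yourself (gl_Position = position / position.w;) forces w to 1,
// which breaks clipping of geometry behind the camera and disables
// perspective-correct interpolation of the varyings (TexCoords), producing
// artifacts like the fading and black slices described above.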
I have been learning modern OpenGL, and in the tutorial I am following I am on framebuffers. There are many examples in the tutorial, so to be able to call the other buffers later I am putting each buffer in its own function. My problem, though, is that anything that uses kernel effects doesn't work properly when placed in a function.
This is my fragment shader when I place my code in a function:
#version 330 core
in vec2 TexCoords;
out vec4 color;
uniform sampler2D screenTexture;
void EdgeDetection();
const float offset = 1.0 / 300;
void main()
{
EdgeDetection();
}
void EdgeDetection()
{
vec2 offsets[9] = vec2[](
vec2(-offset, offset), // top-left
vec2(0.0f, offset), // top-center
vec2(offset, offset), // top-right
vec2(-offset, 0.0f), // center-left
vec2(0.0f, 0.0f), // center-center
vec2(offset, 0.0f), // center-right
vec2(-offset, -offset), // bottom-left
vec2(0.0f, -offset), // bottom-center
vec2(offset, -offset) // bottom-right
);
float kernel[9] = float[](
1.0, 1.0, 1.0,
1.0, -8.0, 1.0,
1.0, 1.0, 1.0
);
vec3 sample[9];
for(int i = 0; i < 9; i++)
{
sample[i] = vec3(texture(screenTexture, TexCoords.st + offsets[i]));
}
//col = colour
vec3 col = vec3(0.0);
for(int i = 0; i < 9; i++)
col += sample[i] * kernel[i];
color = vec4(col, 1.0);
}
This is when the frag code is in main:
#version 330 core
in vec2 TexCoords;
out vec4 color;
uniform sampler2D screenTexture;
const float offset = 1.0 / 300;
void main()
{
vec2 offsets[9] = vec2[](
vec2(-offset, offset), // top-left
vec2(0.0f, offset), // top-center
vec2(offset, offset), // top-right
vec2(-offset, 0.0f), // center-left
vec2(0.0f, 0.0f), // center-center
vec2(offset, 0.0f), // center-right
vec2(-offset, -offset), // bottom-left
vec2(0.0f, -offset), // bottom-center
vec2(offset, -offset) // bottom-right
);
float kernel[9] = float[](
1.0, 1.0, 1.0,
1.0, -8.0, 1.0,
1.0, 1.0, 1.0
);
vec3 sample[9];
for(int i = 0; i < 9; i++)
{
sample[i] = vec3(texture(screenTexture, TexCoords.st + offsets[i]));
}
//col = colour
vec3 col = vec3(0.0);
for(int i = 0; i < 9; i++)
col += sample[i] * kernel[i];
color = vec4(col, 1.0);
}
When I put the code in main it works just fine. Any idea why this is happening? I have included a picture of what happens when I run the program.
I am currently trying to draw billboards and some geometry with a "modern OpenGL approach". The problem is that I cannot get the billboards to keep their positions in space.
I need to link text positions to the positions of other objects. The text position is (3, 3, 3), and the end of the black line has the same coordinates. In some orientations I have exactly what I need: the text is drawn at the end of the line, but in others it is too far from the end of the line.
My render code:
public void Draw()
{
//Set up matrices
projectionMatrix = Matrix4.CreateOrthographic(_width, _height, -10000, 10000);
modelMatrix = Matrix4.Identity;
viewMatrix = Matrix4.CreateRotationY((float)xrot) *
Matrix4.CreateRotationX((float)yrot) *
Matrix4.CreateScale((float)scale);
var viewPort = new Rectangle(-(_width / 2), -(_height / 2), _width, _height);
var viewportTransformationMatrix = ComputeViewportTransformationMatrix(viewPort, -100, 100);
var viewportOrthographicMatrix = ComputeViewportOrthographicMatrix(viewPort);
worldViewProj = modelMatrix * viewMatrix * projectionMatrix;
//DRAW AXISES
GL.UseProgram(axisesProgramID);
axisesProgram.Uniforms["worldViewProj"].SetValue(worldViewProj);
axisesVAO.Bind();
for (int i = 0; i < 4; i++)
{
GL.DrawArrays(PrimitiveType.Lines, i * 2, 2);
}
//DRAW TEXT WITH PRE-CREATED TEXTURE
GL.UseProgram(textProgramID);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, textureID);
//set-up uniforms
textProgram.Uniforms["og_viewportOrthographicMatrix"].SetValue(viewportOrthographicMatrix);
textProgram.Uniforms["og_viewportTransformationMatrix"].SetValue(viewportTransformationMatrix);
textProgram.Uniforms["Position"].SetValue(new float[] { 3.0f, 3.0f, 3.0f });
textProgram.Uniforms["projectionMatrix"].SetValue(projectionMatrix);
textProgram.Uniforms["modelViewMatrix"].SetValue(modelViewMatrix);
textProgram.Uniforms["og_texture0"].SetValue(0);
GL.DrawArrays(PrimitiveType.Points, 0, 1);
GL.BindTexture(TextureTarget.Texture2D, 0);
}
public Matrix4 ComputeViewportTransformationMatrix(Rectangle viewport, float nearDepthRange, float farDepthRange)
{
double halfWidth = viewport.Width * 0.5;
double halfHeight = viewport.Height * 0.5;
double halfDepth = (farDepthRange - nearDepthRange) * 0.5;
//
// Bottom and top swapped: MS -> OpenGL
//
return new Matrix4(
(float)halfWidth, 0.0f, 0.0f, (float)viewport.Left + (float)halfWidth,
0.0f, (float)halfHeight, 0.0f, (float)viewport.Top + (float)halfHeight,
0.0f, 0.0f, (float)halfDepth, (float)nearDepthRange + (float)halfDepth,
0.0f, 0.0f, 0.0f, 1.0f);
}
public static Matrix4 ComputeViewportOrthographicMatrix(Rectangle viewport)
{
//
// Bottom and top swapped: MS -> OpenGL
//
return Matrix4.CreateOrthographicOffCenter(
(float)viewport.Left, (float)viewport.Right,
(float)viewport.Top, (float)viewport.Bottom,
0.0f, 1.0f);
}
My axes shaders are really simple pass-throughs.
//VERTEX SHADER
#version 150 core
in vec3 in_Position;
in vec3 in_Color;
out vec4 color;
uniform mat4 worldViewProj;
void main(void) {
gl_Position = worldViewProj * vec4(in_Position, 1.0);
color = vec4(in_Color, 1.0f);
}
//FRAGMENT SHADER
#version 150 core
in vec4 color;
out vec4 out_Color;
void main(void)
{
out_Color = color;
}
Here are the text (texture) shaders:
//VERTEX SHADER
#version 330
out float gsOrigin;
out vec2 gsPixelOffset;
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
uniform mat4 og_viewportTransformationMatrix;
uniform float origin = 6; // TODO: Why does this not work when float is int?
uniform vec2 pixelOffset = vec2(0,0);
uniform vec3 Position;
vec4 ModelToWindowCoordinates(
vec4 v,
mat4 modelViewPerspectiveMatrix,
mat4 viewportTransformationMatrix)
{
v = modelViewPerspectiveMatrix * v; // clip coordinates
v.xyz /= v.w; // normalized device coordinates
v.xyz = (viewportTransformationMatrix * vec4(v.xyz, 1.0)).xyz; // window coordinates
return v;
}
void main()
{
gl_Position = ModelToWindowCoordinates ( vec4(Position, 1.0f) , modelViewMatrix * projectionMatrix , og_viewportTransformationMatrix ) ;
gsOrigin = origin;
gsPixelOffset = pixelOffset;
}
//GEOMETRY SHADER
#version 330
layout(points) in;
layout(triangle_strip, max_vertices = 4) out;
in float gsOrigin[];
in vec2 gsPixelOffset[];
out vec2 fsTextureCoordinates;
uniform sampler2D og_texture0;
uniform float og_highResolutionSnapScale;
uniform mat4 og_viewportOrthographicMatrix;
void main()
{
float originScales[3] = float[](0.0, 1.0, -1.0);
vec2 halfSize = vec2(textureSize(og_texture0, 0)) * 0.5 * og_highResolutionSnapScale;
vec4 center = gl_in[0].gl_Position;
int horizontalOrigin = int(gsOrigin[0]) & 3; // bits 0-1
int verticalOrigin = (int(gsOrigin[0]) & 12) >> 2; // bits 2-3
center.xy += (vec2(originScales[horizontalOrigin], originScales[verticalOrigin]) * halfSize);
center.xy += (gsPixelOffset[0] * og_highResolutionSnapScale);
vec4 v0 = vec4(center.xy - halfSize, 0, 1.0);
vec4 v1 = vec4(center.xy + vec2(halfSize.x, -halfSize.y), 0, 1.0);
vec4 v2 = vec4(center.xy + vec2(-halfSize.x, halfSize.y), 0, 1.0);
vec4 v3 = vec4(center.xy + halfSize, 0, 1.0);
gl_Position = og_viewportOrthographicMatrix * v0;
fsTextureCoordinates = vec2(0.0, 0.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v1;
fsTextureCoordinates = vec2(1.0, 0.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v2;
fsTextureCoordinates = vec2(0.0, 1.0);
EmitVertex();
gl_Position = og_viewportOrthographicMatrix * v3;
fsTextureCoordinates = vec2(1.0, 1.0);
EmitVertex();
}
//FRAGMENT SHADER
#version 330
in vec2 fsTextureCoordinates;
out vec4 fragmentColor;
uniform sampler2D og_texture0;
uniform vec3 u_color;
void main()
{
vec4 color = texture(og_texture0, fsTextureCoordinates);
if (color.a == 0.0)
{
discard;
}
fragmentColor = vec4(color.rgb * u_color.rgb, color.a);
}
To me it looks like there is some basic coordinate system confusion. I have not checked everything here, but to me,
worldViewProj = modelMatrix * viewMatrix * projectionMatrix;
looks like the wrong way round, as vertices should be multiplied from the right like
projection*view*model*vertex
The same issue exists in your shaders.
Also, I am not entirely sure, but it seems you are computing pixel coordinates for gl_Position in the shader (as you are applying a viewport transform in the function ModelToWindowCoordinates). Since pixel coordinates may range from, e.g., 0,0 to 1920,1080, they are not correct for gl_Position, which should be in clip coordinates.
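With the column-vector convention that GLSL uses, the usual form is the following one-line sketch (the geometry shader above would then also need adjusting, since it currently expects window coordinates from og_viewportTransformationMatrix):
// Vertex shader sketch: output clip coordinates and let OpenGL handle the
// perspective divide and the viewport transform.
gl_Position = projectionMatrix * modelViewMatrix * vec4(Position, 1.0);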
I think you should read a good tutorial about 3D billboarding and the math behind it; for example, this one looks quite interesting. Then modify the sample code to fit your needs step by step.
I am new to shader programming. I am trying to draw a circle with GLSL. I used a point with a size and tried to filter out the fragments outside the radius (by altering the alpha value).
The code is as follows:
Fragment Shader:
#version 130
varying vec2 textureCoordinate;
const float circleBorderWidth = 0.08;//for anti aliasing
void main() {
float d = smoothstep(circleBorderWidth,0.1, 1.0-length(textureCoordinate));
gl_FragColor = vec4(0.0, 1.0, 0.0, d);
}
Vertex Shader:
#version 130
attribute vec4 coord3d;
attribute vec2 varPos;
varying vec2 textureCoordinate;
void
main()
{
textureCoordinate = varPos;
gl_FrontColor = gl_Color;
gl_Position = vec4(coord3d.xyz,1.);
gl_PointSize = coord3d.w;
}
Data:
float pos[] = {
-1, -1,
-1, 1,
1, 1,
1, -1,
};
float vertices[]={0.0,0.0f,0.0f,100.0f};
Draw Method:
void drawScene() {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
program->makeCurrent();
glEnable(GL_POINT_SMOOTH);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
glEnable(GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
if (varPosAttrib>=0) {
glVertexAttribPointer( varPosAttrib, 2, GL_FLOAT, GL_FALSE,
0, pos ); // -->varPos in Vertex Shader.
glEnableVertexAttribArray( varPosAttrib );
}
if (posAttrib>=0) {
glVertexAttribPointer(posAttrib, 4, GL_FLOAT, GL_FALSE, 0, vertices); // -->coord3d in vertex shader
glEnableVertexAttribArray(posAttrib);
glDrawArrays(GL_POINTS, 0, 1);
}
glDisable(GL_POINT_SMOOTH);
glDisable(GL_VERTEX_PROGRAM_POINT_SIZE);
glDisable(GL_BLEND);
program->release();
glutSwapBuffers(); //Send the 3D scene to the screen
}
This results in drawing a square if I replace d with 1.0 in the following line (in the fragment shader):
gl_FragColor = vec4(0.0, 1.0, 0.0, d); // -> if d is replaced by 1.0
I tried to replace the x and y values in gl_FragColor with textureCoordinate.x and textureCoordinate.y. The result was black (so I assume the values are 0.0). The thing I don't understand is that if I take the length of textureCoordinate, it is always 1.0 (I experimented by replacing the value in gl_FragColor). I am unable to figure out what I am doing wrong here. I was expecting the textureCoordinate value to be interpolated with respect to the passed-in data (varPos).
Here's my current attempt at it. It works, in the sense that it draws a disc with a smooth border. I use a distance-field approach, i.e. I compute the distance from the disc's border.
Fragment shader
#version 110
varying vec2 uv;
void
main() {
float border = 0.01;
float radius = 0.5;
vec4 color0 = vec4(0.0, 0.0, 0.0, 1.0);
vec4 color1 = vec4(1.0, 1.0, 1.0, 1.0);
vec2 m = uv - vec2(0.5, 0.5);
float dist = radius - sqrt(m.x * m.x + m.y * m.y);
float t = 0.0;
if (dist > border)
t = 1.0;
else if (dist > 0.0)
t = dist / border;
gl_FragColor = mix(color0, color1, t);
}
Vertex shader
#version 110
varying vec2 uv;
void
main() {
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
uv = vec2(gl_MultiTexCoord0);
}
It's meant to be drawn on a quad with texture coordinates going from (0.0, 0.0) to (1.0, 1.0), since the shader recenters them with uv - vec2(0.5, 0.5).
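For completeness, a minimal compatibility-profile sketch of such a quad (texture coordinates from (0, 0) to (1, 1), so that uv - vec2(0.5, 0.5) is centred on the disc; the vertex positions are just an example):
// Immediate-mode quad matching the #version 110 shaders above.
glBegin(GL_QUADS);
glTexCoord2f(0.0f, 0.0f); glVertex2f(-0.5f, -0.5f);
glTexCoord2f(1.0f, 0.0f); glVertex2f( 0.5f, -0.5f);
glTexCoord2f(1.0f, 1.0f); glVertex2f( 0.5f,  0.5f);
glTexCoord2f(0.0f, 1.0f); glVertex2f(-0.5f,  0.5f);
glEnd();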