I am trying to get a basic geometry shader to work, but I am completely failing. After checking numerous resources, I still cannot find a solution to my problem.
Here is my code for my vertex, geometry, and fragment shaders.
Vertex Shader:
#version 330 core
// Vertex Shader Inputs
layout (location = 0) in vec3 Pos;
layout (location = 1) in vec3 Norm;
layout (location = 2) in vec3 Color;

// Vertex to Fragment Shader Outputs
out DATA {
    vec3 vsPos;
    vec3 vsNorm;
    vec4 vsColor;
} data_out;

// Main.cpp Imports
uniform mat4 camMatrix; // viewProjection Matrix
uniform mat4 model;

void main()
{
    vec3 vsPos = vec3(model * vec4(Pos, 1.0f));
    vec3 vsNorm = mat3(transpose(inverse(model))) * Norm; // Normal vector correction
    vec4 vsColor = vec4(Color, 1.0f);
    gl_Position = camMatrix * vec4(vsPos, 1.0f);
}
Geometry Shader:
#version 330 core
layout (triangles) in;
layout (triangle_strip, max_vertices = 3) out;

out vec3 gsPos;
out vec3 gsNorm;
out vec3 gsColor;

in DATA {
    vec3 vsPos;
    vec3 vsNorm;
    vec4 vsColor;
} data_in[];

uniform mat4 camMatrix;

void main()
{
    for (int i = 0; i < 3; i++)
    {
        gsPos = data_in[i].vsPos;
        gsNorm = data_in[i].vsNorm;
        gsColor = data_in[i].vsColor;
        gl_Position = camMatrix * vec4(data_in[i].vsPos, 1.0f);
        EmitVertex();
    }
    EndPrimitive();
}
Fragment Shader:
#version 330 core
out vec4 FragColor;

// Fragment Shader Inputs
in vec3 gsPos;
in vec3 gsNorm;
in vec4 gsColor;

// Fragment Shader Uniforms
uniform sampler2D diffuse0;
uniform sampler2D specular0;
uniform vec4 lightColor;
uniform vec3 lightPos;
uniform vec3 camPos;

vec4 pointLight()
{
    vec3 lightVec = (lightPos - vsPos);
    // intensity of light with respect to distance
    float dist = length(lightVec);
    float a = 0.7;
    float b = 0.4;
    float c = 1.0;
    float inten = 1.0f / (a * dist * dist + b * dist + c);
    // ambient lighting
    float ambient = 0.75f;
    // diffuse lighting
    vec3 fsNorm = normalize(gsNorm);
    vec3 lightDirection = normalize(lightVec);
    float diffuse = max(dot(fsNorm, lightDirection), 0.0f);
    // specular lighting
    float specular = 0.0f;
    if (diffuse != 0.0f)
    {
        float specularLight = 0.50f;
        vec3 viewDirection = normalize(gsNorm - gsPos);
        vec3 halfwayVec = normalize(viewDirection + lightDirection);
        float specAmount = pow(max(dot(fsNorm, halfwayVec), 0.0f), 32);
        specular = specAmount * specularLight;
    }
    return inten * (gsColor * (diffuse + ambient) + gsColor * specular) * lightColor;
}

void main()
{
    // outputs final color
    FragColor = pointLight();
}
My mesh generation function:
void genMesh()
{
    VAO.Bind();
    VBO VBO(vtx);
    EBO EBO(idx);
    VAO.LinkAttrib(VBO, 0, 3, GL_FLOAT, sizeof(Vertex), (void*)0);
    VAO.LinkAttrib(VBO, 1, 3, GL_FLOAT, sizeof(Vertex), (void*)(3 * sizeof(float)));
    VAO.LinkAttrib(VBO, 2, 4, GL_FLOAT, sizeof(Vertex), (void*)(6 * sizeof(float)));
    VAO.Unbind();
    VBO.Unbind();
    EBO.Unbind();
}
My mesh draw function:
void Mesh::Draw(Shader& shader, Camera& camera)
{
    shader.Activate();
    VAO.Bind();
    // Take care of the camera Matrix
    glUniform3f(glGetUniformLocation(shader.ID, "camPos"),
                camera.Position.x,
                camera.Position.y,
                camera.Position.z);
    camera.Matrix(shader, "camMatrix");
    // Draw the actual mesh
    glDrawElements(GL_TRIANGLES, idx.size() * sizeof(GLuint), GL_UNSIGNED_INT, 0);
}
I call my mesh generation function outside of the main while loop, then I draw the mesh in my main while loop.
Debugging my program through RenderDoc gives me the error "No vertex shader bound at draw!" Without the geometry shader (keeping everything else roughly the same), I do not get any errors in RenderDoc. I tried updating my graphics drivers, but I still get the same error. Please help me, I feel like I am losing my mind.
In the fragment shader, gsColor is declared as a vec4 variable, but in the geometry shader it is declared as a vec3 variable. Because the stage interfaces don't match, the shader program fails to link, so no valid program is bound at the draw call, which is why RenderDoc reports "No vertex shader bound at draw!". Two related problems are visible in the posted code: pointLight() still references vsPos, which is not declared in the fragment shader (it should be gsPos), and the vertex shader assigns to local variables instead of writing data_out.vsPos, data_out.vsNorm and data_out.vsColor, so the geometry shader would receive undefined values.
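A minimal sketch of the fixed interface, showing only the changed lines. (As an unrelated aside, the count argument of glDrawElements in Mesh::Draw should be the number of indices, idx.size(), not a byte size.)

// Vertex shader: write the output block instead of local variables
void main()
{
    data_out.vsPos = vec3(model * vec4(Pos, 1.0f));
    data_out.vsNorm = mat3(transpose(inverse(model))) * Norm;
    data_out.vsColor = vec4(Color, 1.0f);
    gl_Position = camMatrix * vec4(data_out.vsPos, 1.0f);
}

// Geometry shader: the output type must match the fragment shader input
out vec4 gsColor;

// Fragment shader: same type on the receiving side, and use the gs* name
in vec4 gsColor;
// ...
vec3 lightVec = (lightPos - gsPos); // was vsPos, which is not declared in this shader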
Currently I am rendering mesh triangles like this:
// draw the same polygons again
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
shader.setVec3("objectColor", obj_color);
glDrawElements(GL_TRIANGLES, static_cast<unsigned int>(indices.size()), GL_UNSIGNED_INT, 0);
The problem with this code is that I am setting the object color inside the shader for the whole mesh.
What would be a good way to render one single mesh whose faces have different colors?
For now I only know how to set vertex colors and pass them to the fragment shader.
What are the most common ways to set individual face colors?
The only approach I can think of is duplicating mesh vertices so that faces don't share vertices, to avoid vertex color interpolation.
My current shader looks like this:
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormal;

out vec3 FragPos;
out vec3 Normal;
out vec3 LightPos;

uniform vec3 lightPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

void main()
{
    gl_Position = projection * view * model * vec4(aPos, 1.0);
    FragPos = vec3(view * model * vec4(aPos, 1.0));
    Normal = mat3(transpose(inverse(view * model))) * aNormal;
    LightPos = vec3(vec4(lightPos, 1.0)); // Transform world-space light position to view-space light position
    // FragPos = vec3(model * vec4(aPos, 1.0));
    // Normal = mat3(transpose(inverse(model))) * aNormal;
    // gl_Position = projection * view * vec4(FragPos, 1.0);
}
Fragment Shader:
#version 330 core
out vec4 FragColor;

in vec3 FragPos;
in vec3 Normal;
in vec3 LightPos; // extra in variable, since we need the light position in view space we calculate this in the vertex shader

uniform vec3 lightColor;
uniform vec3 objectColor;
uniform float f;
uniform float transparency;

void main()
{
    //flat shading
    // vec3 x_ = dFdx(FragPos);
    // vec3 y_ = dFdy(FragPos);
    // vec3 normal_ = cross(x_, y_);
    // vec3 norm_ = normalize(normal_);

    // ambient
    float ambientStrength = 0.75;
    vec3 ambient = ambientStrength * lightColor;

    // diffuse
    vec3 norm = normalize(Normal);
    vec3 lightDir = normalize(LightPos - FragPos);
    float diff = max(dot(norm, lightDir), 0.0); // change "norm_" to "norm" to avoid the performance warning and have an unwelded view
    vec3 diffuse = diff * lightColor;

    // specular
    float specularStrength = 0.01;
    vec3 viewDir = normalize(-FragPos); // the viewer is always at (0,0,0) in view-space, so viewDir is (0,0,0) - Position => -Position
    vec3 reflectDir = reflect(-lightDir, norm);
    float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32);
    vec3 specular = specularStrength * spec * lightColor;

    vec3 shading = (ambient + diffuse + specular) * objectColor;
    //float f = 0.75;
    float r_interpolated = shading[0] + f * (objectColor[0] - shading[0]);
    float g_interpolated = shading[1] + f * (objectColor[1] - shading[1]);
    float b_interpolated = shading[2] + f * (objectColor[2] - shading[2]);
    vec3 result = vec3(r_interpolated, g_interpolated, b_interpolated);
    FragColor = vec4(result, transparency);
}
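As an aside, the per-channel interpolation at the end of this fragment shader computes exactly what GLSL's built-in mix() does, so it could be written in one line:

vec3 result = mix(shading, objectColor, f); // shading + f * (objectColor - shading)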
You can use the flat Interpolation qualifier:
The value will not be interpolated. The value given to the fragment shader is the value from the Provoking Vertex for that primitive.
Vertex shader
// [...]
layout (location = 0) in vec3 aColor;
flat out vec3 vColor;

void main()
{
    vColor = aColor;
    // [...]
}
Fragment shader
// [...]
flat in vec3 vColor;

void main()
{
    FragColor = vec4(vColor, 1.0);
}
With this implementation, the entire triangle primitive is rendered with one color. If you find a clever scheme for assigning the color attributes to the vertices, you can render all triangles with different colors, e.g. 2 triangles with the indices 0-1-2 and 1-2-3: with the first-vertex convention (see the sketch below), the color attribute of vertex 0 defines the color of the first triangle and the color attribute of vertex 1 defines the color of the 2nd triangle.
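Which vertex acts as the provoking vertex can be selected on the C++ side with the core glProvokingVertex call, a minimal sketch:

// Make the FIRST vertex of each triangle the provoking vertex, so that
// triangle 0-1-2 takes its flat color from vertex 0 and triangle 1-2-3
// from vertex 1. The default is GL_LAST_VERTEX_CONVENTION (vertices 2 and 3).
glProvokingVertex(GL_FIRST_VERTEX_CONVENTION);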
An alternative way would be to create an array of colors, one per triangle primitive, and store this color array in a Shader Storage Buffer Object. Use gl_VertexID to address the color in the vertex shader. Note that indexing with gl_VertexID / 3 assumes a non-indexed glDrawArrays(GL_TRIANGLES, ...) draw, where gl_VertexID counts vertices sequentially.
layout(std430, binding = 0) buffer primitiveColors
{
    vec4 colors[];
};

void main()
{
    vColor = colors[gl_VertexID / 3];
    // [...]
}
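On the C++ side, the buffer could be created and bound to binding point 0 along these lines (a sketch; ssbo and faceColors are illustrative names). Note that SSBOs require OpenGL 4.3 or the ARB_shader_storage_buffer_object extension, so the #version 330 shaders above would need a version bump:

GLuint ssbo;
glGenBuffers(1, &ssbo);
glBindBuffer(GL_SHADER_STORAGE_BUFFER, ssbo);
// one vec4 color per triangle
glBufferData(GL_SHADER_STORAGE_BUFFER,
             faceColors.size() * sizeof(glm::vec4),
             faceColors.data(), GL_STATIC_DRAW);
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 0, ssbo); // binding = 0 in the shader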
I'm writing a small "engine", and the time has finally come to implement transformations. However, when I call glGetUniformLocation, it returns -1. Here is my rendering method:
void GFXRenderer::submit(EntityBase* _entity, GPUProgram _program) {
    if (_entity->mesh.has_value()) {
        mat4 mod_mat(1.0);
        //mod_mat = translate(mod_mat, _entity->transform.position);
        /*
        mod_mat = scale(mod_mat, _entity->transform.scale);
        mod_mat = rotate(mod_mat, radians(_entity->transform.rotation.x), vec3(1.0, 0.0, 0.0));
        mod_mat = rotate(mod_mat, radians(_entity->transform.rotation.y), vec3(0.0, 1.0, 0.0));
        mod_mat = rotate(mod_mat, radians(_entity->transform.rotation.z), vec3(0.0, 0.0, 1.0));
        */
        mod_mat = translate(mod_mat, vec3(0.5f, -0.5f, 0.0f));
        //mod_mat = glm::rotate(mod_mat, (float)glfwGetTime(), glm::vec3(0.0f, 0.0f, 1.0f));
        glUseProgram(_program.id);
        int transform = glGetUniformLocation(_program.vsh.id, "transform");
        std::cout << transform << std::endl;
        glUniformMatrix4fv(transform, 1, GL_FALSE, value_ptr(mod_mat));
        glUseProgram(_program.id);
        glBindTexture(GL_TEXTURE_2D, _entity->mesh->tex_id);
        glBindVertexArray(_entity->mesh->vao);
        glDrawElements(GL_TRIANGLES, _entity->mesh->indices.size(), GL_UNSIGNED_INT, 0);
        glUseProgram(0);
        glBindVertexArray(0);
    }
}
Here EntityBase is an object class. It contains a transform class, as follows:
class Transform {
public:
    vec3 position;
    vec3 rotation;
    vec3 scale;
    quat q_rot;
    mat4x4 matrix;
};
Ignore the quaternion and matrix. Also, I must mention that without the transformation it renders flawlessly. (SIKE)
Here is my vsh:
#version 460 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec2 aTex;

out vec2 tex_coord;
out mat4 f_tr_opt; // for stopping optimization

uniform mat4 transform;

void main() {
    tex_coord = aTex;
    f_tr_opt = transform;
    gl_Position = transform * vec4(aPos, 1.0);
}
Here is my fsh:
#version 460 core
in vec2 tex_coord;
in mat4 f_tr_opt; // again, same thing
out vec4 FragColor;

uniform sampler2D texture0;

void main() {
    mat4 garbage = f_tr_opt * f_tr_opt; // named garbage for easier recognition
    FragColor = texture(texture0, tex_coord);
}
I checked for compile and linking errors; all is fine. Please correct me as to what I am doing wrong here.
See glGetUniformLocation. The uniform location must be requested from the linked program object, not from the (vertex) shader object:

// wrong: queries the vertex shader object
int transform = glGetUniformLocation(_program.vsh.id, "transform");

// correct: queries the linked program object
int transform = glGetUniformLocation(_program.id, "transform");
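With that fix in place, a defensive version of the lookup might look like this (a sketch; note that a location of -1 is silently ignored by the glUniform* calls, which makes this failure easy to miss):

glUseProgram(_program.id);
int transform = glGetUniformLocation(_program.id, "transform");
if (transform == -1) {
    // either the name is misspelled or the uniform was optimized away as unused
    std::cerr << "uniform 'transform' not found" << std::endl;
}
glUniformMatrix4fv(transform, 1, GL_FALSE, value_ptr(mod_mat));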
I am trying to implement shadows. I checked my depth texture on a quad and it looks correct, but the shadow is not displayed. I checked my shadow vertex and fragment shaders, and I believe I have done the light-space transformation correctly.
Here is my code.
Directional light source matrix setup:
//light source states
glm::vec3 Window::lightColor = glm::vec3(0.9f, 0.9f, 0.9f);
glm::vec3 Window::lightDir = glm::vec3(-1.f, -1.f, 0.f);
glm::mat4 Window::lightView = glm::lookAt(glm::vec3(0.f) - glm::normalize(lightDir) * 15.f, glm::vec3(0.0f), glm::vec3(0.f, 1.f, 0.f));
float Window::near_plane = 0.01f;
float Window::far_plane = 50.1f;
float camWidth = 10.f;
glm::mat4 Window::lightProj = glm::ortho(-10.f, 10.f, -10.f, 10.f, Window::near_plane, Window::far_plane);
glm::mat4 Window::lightProjView = lightProj * lightView;
Shadow drawing logic:
void Renderer::drawWithShadow(Object* obj) {
    //set shader uniforms
    Shader* shader = shadowShader;
    shader->bind();
    shader->setUniformMat4("model", obj->model);
    shader->setUniformMat4("projView", projView);
    shader->setUniformVec3("viewPos", eyePos);
    //need another projection matrix
    shader->setUniformMat4("lightSpaceMatrix", shadowProjView);
    glcheck(glActiveTexture(GL_TEXTURE0));
    glcheck(glBindTexture(GL_TEXTURE_2D, textID));
    //light uniforms
    shader->setUniformVec3("directionalLightDir", directionalLightDir);
    shader->setUniformVec3("lightColor", lightColor);
    glcheck(glBindVertexArray(obj->vao));
    for (auto i = 0; i < obj->meshList.size(); i++) {
        Mesh* mesh = obj->meshList[i];
        prepMaterial(mesh->material, shader);
        glcheck(glDrawElements(GL_TRIANGLES, mesh->size, GL_UNSIGNED_INT, (GLvoid*)(sizeof(GLuint) * mesh->vertexOffset)));
    }
}
Vertex and fragment shaders to prepare the shadow depth texture:
//vertex shader
#version 330 core
layout (location = 0) in vec3 position;
uniform mat4 projView;
uniform mat4 model;
void main() {
    gl_Position = projView * model * vec4(position, 1.0);
}

//fragment shader
#version 330 core
void main()
{
}
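The question doesn't show the framebuffer setup; for reference, a depth-only FBO that pairs with these shaders is typically created along these lines (a sketch with an assumed 1024x1024 map; depthFBO and depthTex are illustrative names, not taken from the question):

GLuint depthFBO, depthTex;
glGenTextures(1, &depthTex);
glBindTexture(GL_TEXTURE_2D, depthTex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, 1024, 1024, 0,
             GL_DEPTH_COMPONENT, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
float border[] = { 1.0f, 1.0f, 1.0f, 1.0f }; // "farthest depth" outside the map
glTexParameterfv(GL_TEXTURE_2D, GL_TEXTURE_BORDER_COLOR, border);

glGenFramebuffers(1, &depthFBO);
glBindFramebuffer(GL_FRAMEBUFFER, depthFBO);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
                       GL_TEXTURE_2D, depthTex, 0);
glDrawBuffer(GL_NONE); // depth only, no color attachment
glReadBuffer(GL_NONE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);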
Vertex and fragment shaders to draw shadows with Phong lighting:
//vertex shader
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
layout (location = 2) in vec2 texCoord;

out VS_OUT {
    vec4 fragPos;
    vec3 normal;
    vec2 texCoord;
    vec4 fragPosLightSpace;
} vs_out;

uniform mat4 projView;
uniform mat4 model;
uniform mat4 lightSpaceMatrix;

void main()
{
    vs_out.fragPos = model * vec4(position, 1.0);
    vs_out.normal = transpose(inverse(mat3(model))) * normal;
    vs_out.texCoord = texCoord;
    vs_out.fragPosLightSpace = lightSpaceMatrix * vs_out.fragPos;
    gl_Position = projView * vs_out.fragPos;
}
//fragment shader
#version 330 core
uniform vec3 viewPos; //just the eye pos
uniform vec3 diffuseFactor; //kd
uniform vec3 ambientColor; //ka
uniform vec3 specColor; //ks
uniform float specHighlight; //ns, the larger this value is, the more apparent the light dot on the surface
uniform float dissolve; //d
//lights
uniform vec3 directionalLightDir;
uniform vec3 pointLightPos;
uniform vec3 lightColor;
uniform sampler2D shadowMap;
//uniform sampler2DShadow shadowMap;

in VS_OUT {
    vec4 fragPos;
    vec3 normal;
    vec2 texCoord;
    vec4 fragPosLightSpace;
} fs_in;

out vec4 fragColor;

float ShadowCalculation(vec4 fragPosLightSpace)
{
    vec3 projCoords = fragPosLightSpace.xyz / fragPosLightSpace.w;
    vec2 shadowCoords;
    shadowCoords.x = projCoords.x * 0.5 + 0.5;
    shadowCoords.y = projCoords.y * 0.5 + 0.5;
    float closestDepth = texture(shadowMap, shadowCoords).r;
    float currentDepth = projCoords.z * 0.5 + 0.5;
    float shadowValue = currentDepth + 0.00001 > closestDepth ? 1.0 : 0.0;
    //if (currentDepth < 0.0)
    //    shadowValue = 0.0;
    return shadowValue;
}

void main()
{
    vec3 lightDir = normalize(-directionalLightDir);
    vec3 norm = normalize(fs_in.normal);
    //diffuse lighting
    float diffStrength = max(dot(norm, lightDir), 0.0); // this calculates diffuse intensity based on angle
    vec3 diffuse = lightColor * diffStrength * diffuseFactor;
    //specular
    vec3 viewDir = normalize(viewPos - fs_in.fragPos.xyz);
    vec3 reflectDir = reflect(-lightDir, norm);
    float spec = 0.0;
    if (specHighlight > 0.0) { // if specHighlight is < 0, pow might produce undefined result if base is also 0
        spec = pow(max(dot(viewDir, reflectDir), 0.0), specHighlight);
    }
    vec3 specular = spec * specColor * lightColor;
    float shadow = ShadowCalculation(fs_in.fragPosLightSpace);
    //float shadow = textureProj(shadowMap, fs_in.fragPosLightSpace);
    //vec3 result = ambientColor * 0.05 * lightColor + (diffuse + specular)*(1-shadow);
    vec3 result = (diffuse + specular) * (1.0 - shadow);
    fragColor = vec4(result, 1);
}
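As an aside, the commented-out sampler2DShadow / textureProj variant would additionally require the depth texture's compare mode to be enabled on the C++ side, roughly like this (using the textID handle from drawWithShadow):

glBindTexture(GL_TEXTURE_2D, textID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_COMPARE_FUNC, GL_LEQUAL);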
With just Phong shading, the scene looks like this:
[image: Phong shading]
When the scene is seen from the light source as depth values:
[image: depth texture on a quad]
When I finally render the scene, it is mostly black; I made sure the far plane covers all of the bunnies:
[image: shadow render]
I ran into some problems when implementing diffuse lighting.
The correct result should look like the left side of the picture below; the right side is my incorrect result.
[image: diffuse light comparison]
The problems are these:
1. There is no lighting effect at first; I have to rotate the object to a certain angle before the lighting effect appears.
2. The object has some stray triangles mixed into it. (I have already checked my .obj and .mtl file-reading code; it is correct.)
3. I only turned on the diffuse light, but the lighting effect looks like ambient light.
My light source position is (0, 0, 5) and my eye position is (0, 0, 0).
My vertex shader:
attribute vec4 vertexPosition;
attribute vec3 vertexNormal_objectSpace;
varying vec4 vv4color;

uniform mat4 mvp; //model, viewing, projection transformation matrix
uniform mat4 NormalMatrix;

struct LightSourceParameters
{
    vec4 ambient;
    vec4 diffuse;
    vec4 specular;
    vec4 position;
    vec4 halfVector;
    vec3 spotDirection;
    float spotExponent;
    float spotCutoff; // (range: [0.0,90.0], 180.0)
    float spotCosCutoff; // (range: [1.0,0.0],-1.0)
    float constantAttenuation;
    float linearAttenuation;
    float quadraticAttenuation;
};

struct MaterialParameters
{
    vec4 ambient;
    vec4 diffuse;
    vec4 specular;
    float shininess;
};

uniform MaterialParameters Material;
uniform LightSourceParameters LightSource[3]; //because I have to implement three light sources (directional, point, specular light)

void main()
{
    vec3 normal, TransformedNormal, lightDirection;
    vec4 ambient, diffuse;
    float NdotL;
    ambient = LightSource[0].ambient * Material.ambient;
    TransformedNormal = vec3(vec4(vertexNormal_objectSpace, 0.0) * NormalMatrix);
    normal = normalize(TransformedNormal);
    lightDirection = normalize(vec3(LightSource[0].position));
    NdotL = max(dot(normal, lightDirection), 0.0);
    diffuse = LightSource[0].diffuse * Material.diffuse * NdotL;
    vv4color = ambient + diffuse; //vv4color is passed to the fragment shader
    gl_Position = mvp * vertexPosition;
}
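An aside on the math this shader relies on: in GLSL, multiplying a vector on the left, v * M, is equivalent to transpose(M) * v, so vec4(vertexNormal_objectSpace, 0.0) * NormalMatrix effectively applies the transpose of NormalMatrix. The normal matrix itself is the inverse-transpose of the model-view matrix; for a normal n and a tangent t it preserves perpendicularity:

\[
t' = M\,t,\qquad n' = (M^{-1})^{\top} n
\quad\Rightarrow\quad
n'^{\top} t' = \bigl((M^{-1})^{\top} n\bigr)^{\top} M\,t = n^{\top} M^{-1} M\,t = n^{\top} t = 0 .
\]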
My display function:
void onDisplay(void)
{
    Matrix4 MVP, modelView, NormalMatrix;
    int i = 0;
    // clear canvas
    glClearColor(0.5f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnableVertexAttribArray(iLocPosition);
    glEnableVertexAttribArray(iLocNormal);
    geo_rotate = geo_rotate_x * geo_rotate_y * geo_rotate_z;
    Geo = geo_rotate * geo_scale * geo_trans;
    modelView = View * Geo;
    modelView = modelView.transpose(); //row-major -> column-major
    modelView = modelView.invert(); //normal transformation (transpose after inverse)
    NormalMatrix = modelView.transpose();
    MVP = Proj * View * Geo * Norm;
    MVP = MVP.transpose();
    glUniformMatrix3fv(iLocEyePosition, 1, GL_FALSE, &eye[0]);
    glUniformMatrix4fv(iLocNormalMatrix, 1, GL_FALSE, &NormalMatrix[0]); //bind uniform matrix to shader
    glUniformMatrix4fv(iLocMVP, 1, GL_FALSE, &MVP[0]);
    group = OBJ->groups;
    for (i = 0; i < OBJ->numgroups - 1; i++)
    {
        //pass model material values to the shader
        glUniform4fv(iLocMAmbient, 1, material[i].ambient);
        glUniform4fv(iLocMDiffuse, 1, material[i].diffuse);
        glUniform4fv(iLocMSpecular, 1, material[i].specular);
        glUniform1f(iLocMShininess, material[i].shininess);
        glVertexAttribPointer(iLocPosition, 3, GL_FLOAT, GL_FALSE, 0, V[i]); //bind array pointers to shader
        glVertexAttribPointer(iLocNormal, 3, GL_FLOAT, GL_FALSE, 0, N[i]);
        glDrawArrays(GL_TRIANGLES, 0, group->numtriangles * 3); //draw the arrays we just bound
        group = group->next;
    }
    glutSwapBuffers();
}
Thank you, all of you.
I'd like to display a simple UV sphere (exported from Blender) and generate lines along the normals using a single geometry shader.
As a first step, I wrote a simple geometry shader which just passes the input vertex information through to the fragment shader. For the sake of simplicity (for this example) I removed the lighting calculations from the fragment shader.
Vertex shader:
#version 400
layout (location = 0) in vec3 VertexPosition;
layout (location = 1) in vec3 VertexNormal;

uniform mat4 MVP;
out vec3 VPosition;
out vec3 VNormal;

void main(void)
{
    VNormal = VertexNormal;
    gl_Position = vec4(VertexPosition, 1.0f);
}
Geometry shader:
#version 400
layout(points) in;
layout(line_strip, max_vertices = 2) out;

uniform mat4 MVP;
in vec3 VNormal[];
out vec3 fcolor;

void main(void)
{
    float size = 2.5f;
    fcolor = vec3(0.0f, 0.0f, 1.0f);
    gl_Position = MVP * gl_in[0].gl_Position;
    EmitVertex();
    fcolor = vec3(1.0f, 1.0f, 0.0f);
    gl_Position = MVP * vec4(gl_in[0].gl_Position.xyz + vec3(
        VNormal[0].x * size, VNormal[0].y * size, VNormal[0].z * size), 1.0f);
    EmitVertex();
    EndPrimitive();
}
And the fragment shader:
#version 400
in vec3 Position;
in vec3 Normal;
in vec2 TexCoords;
out vec4 FragColor;
in vec3 fcolor;

void main(void)
{
    FragColor = vec4(fcolor, 1.0f);
}
Now, in the C++ code, the primitive type to draw (here, triangles):
glDrawArrays(GL_TRIANGLES, 0, meshList[idx]->getVertexBuffer()->getBufferSize());
And finally the output:
Up to this point, everything is OK.
Now I want to generate strands on the sphere along the normals. To get the job done I wrote the following geometry shader (the vertex and fragment shaders are the same).
#version 400
layout(points) in;
layout(line_strip, max_vertices = 2) out;

uniform mat4 MVP;
in vec3 VNormal[];
out vec3 fcolor;

void main(void)
{
    float size = 1.0f;
    fcolor = vec3(0.0f, 0.0f, 1.0f);
    gl_Position = MVP * gl_in[0].gl_Position;
    EmitVertex();
    fcolor = vec3(1.0f, 1.0f, 0.0f);
    gl_Position = MVP * vec4(gl_in[0].gl_Position.xyz + vec3(
        VNormal[0].x * size, VNormal[0].y * size, VNormal[0].z * size), 1.0f);
    EmitVertex();
    EndPrimitive();
}
Since the input primitive type is now points, I modified the C++ code that draws the scene:
glDrawArrays(GL_POINTS, 0, meshList[idx]->getVertexBuffer()->getBufferSize());
And the output:
Finally, if I want triangles as the input primitive and a line_strip as the output primitive in the geometry shader, I have the following shader:
#version 400
layout(triangles, invocations = 3) in;
layout(line_strip, max_vertices = 6) out;

uniform mat4 MVP;
in vec3 VNormal[];
out vec3 fcolor;

void main(void)
{
    float size = 1.0f;
    for (int i = 0; i < 3; i++)
    {
        fcolor = vec3(0.0f, 0.0f, 1.0f);
        gl_Position = MVP * gl_in[i].gl_Position;
        EmitVertex();
        fcolor = vec3(1.0f, 1.0f, 0.0f);
        gl_Position = MVP * vec4(gl_in[0].gl_Position.xyz + vec3(
            VNormal[0].x * size, VNormal[0].y * size, VNormal[0].z * size), 1.0f);
        EmitVertex();
        EndPrimitive();
    }
}
And the output is the following:
But my goal is to display the whole scene (sphere + strands) in one output using the same geometry shader. I'd like to know if it is possible to do this. I don't think so, because a geometry shader must have exactly one input primitive type and one output primitive type, not several; but I want to be certain whether it is possible or not.
Who knows, maybe one day there'll be an extension to emit multiple primitive types from a geometry shader, but as you say it can't currently be done.
One alternative might be to draw the normal lines with triangles instead.
Another alternative, though completely useless in this case, might be to use transform feedback to save the vertex shader results and reuse that data with two separate geometry shaders. I only mention this as it's the closest thing I could think of to emitting multiple primitive types after the vertex stage.
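For the triangles-instead-of-lines alternative above, a sketch of a single geometry shader that emits both the original triangle and one thin quad per vertex normal (the halfWidth value, the colors, and the reference axis are illustrative choices, not taken from the question; normals of vertices shared by several triangles will be drawn more than once):

#version 400
layout(triangles) in;
layout(triangle_strip, max_vertices = 15) out;

uniform mat4 MVP;
in vec3 VNormal[];
out vec3 fcolor;

void main(void)
{
    float size = 1.0f;
    float halfWidth = 0.01f; // half-thickness of a normal "line", in model units
    // 1) pass the mesh triangle through
    for (int i = 0; i < 3; i++)
    {
        fcolor = vec3(0.8f, 0.8f, 0.8f); // outputs are undefined after EmitVertex, so write fcolor every time
        gl_Position = MVP * gl_in[i].gl_Position;
        EmitVertex();
    }
    EndPrimitive();
    // 2) one thin quad (a 4-vertex strip = 2 triangles) per vertex normal
    for (int i = 0; i < 3; i++)
    {
        vec3 base = gl_in[i].gl_Position.xyz;
        vec3 tip = base + VNormal[i] * size;
        // any axis not parallel to the normal works for building the side offset
        vec3 axis = abs(VNormal[i].y) < 0.99f ? vec3(0.0f, 1.0f, 0.0f) : vec3(1.0f, 0.0f, 0.0f);
        vec3 side = normalize(cross(VNormal[i], axis)) * halfWidth;
        fcolor = vec3(0.0f, 0.0f, 1.0f);
        gl_Position = MVP * vec4(base - side, 1.0f);
        EmitVertex();
        fcolor = vec3(0.0f, 0.0f, 1.0f);
        gl_Position = MVP * vec4(base + side, 1.0f);
        EmitVertex();
        fcolor = vec3(1.0f, 1.0f, 0.0f);
        gl_Position = MVP * vec4(tip - side, 1.0f);
        EmitVertex();
        fcolor = vec3(1.0f, 1.0f, 0.0f);
        gl_Position = MVP * vec4(tip + side, 1.0f);
        EmitVertex();
        EndPrimitive();
    }
}

With this, a single glDrawArrays(GL_TRIANGLES, ...) call draws the sphere and its normals in one pass.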
EDIT
The two geometry shaders for drawing normals confuse me. In the second one, max_vertices = 3, which should be 6 for 3 separate lines, and EndPrimitive should also be inside the for-loop so the 3 lines aren't connected. But you've already sorted this out by drawing GL_POINTS in the previous one. Is this intended to be structured for multiple primitive output, if it were supported? (fixed)
Given that your geometry reuses many vertices, indexed drawing with glDrawElements would be more efficient. Although you'd still want to use glDrawArrays for drawing the normal lines, to avoid drawing a vertex's normal multiple times where the index array references it more than once.