I have been researching different techniques for rendering grass. I've decided to go with geometry-shader-generated grass, mainly so I can generate the blade geometry (triangle strips) on the fly while rendering the points as GL_POINTS, but I'm not seeing the performance I'd like. I'm getting maybe 20-50 fps with 100,000 blades of grass, and I have a decent GPU. I'm wondering whether my approach is wrong, whether I'm hitting the limits of my GPU, whether I'm doing something incorrectly, or whether there is a faster way (ideally I want individual blades whose vertices I can manipulate). The texture I am using is 256x256.
My rendering steps are:
Create the VAO and VBO, store the locations, and bind them once:
float[] GrassLocations = new float[100000 * 3]; // x, y, z per blade
int vaoID = createVAO();
// ...bind the VBO to the VAO
storeDataInAttributeList(0, 3, GrassLocations, 0, 0);
I then render:
GL30.glBindVertexArray(VAO);
GL20.glEnableVertexAttribArray(0);
GL13.glActiveTexture(GL13.GL_TEXTURE0);
GL11.glBindTexture(GL11.GL_TEXTURE_2D, texture);
GL11.glDrawArrays(GL11.GL_POINTS, 0, 100000);
GL20.glDisableVertexAttribArray(0);
GL30.glBindVertexArray(0);
Then my vertex shader:
#version 400
layout (location = 0) in vec3 VertexLocation;
uniform float time;
out vec3 offsets;
out vec3 Position;
out vec3 Normal;
out vec2 TexCoord;
out float visibility;
uniform mat4 transformationMatrix;
uniform mat4 viewMatrix;
uniform mat4 MVPmatrix;
uniform mat4 modelViewMatrix;
const float density = .007;
const float gradient = 1.5;
out float Time;
void main()
{
Time = time;
vec4 worldPosition = transformationMatrix * vec4(VertexLocation,1.0);
vec4 positionRelativeToCam = modelViewMatrix* vec4(VertexLocation,1.0);
Normal = vec3(0,1,0);
Position = vec3( worldPosition );
gl_Position = MVPmatrix* vec4(VertexLocation,1.0);
float distance = length(positionRelativeToCam.xyz);
visibility = exp(-pow((distance * density), gradient));
visibility = clamp(visibility,0.0,1.0);
offsets = offset;
}
I did gut the vertex shader, leaving only the gl_Position write, and that still wasn't the issue.
My Geometry Shader:
#version 400
layout( points ) in;
layout( triangle_strip, max_vertices = 10 ) out;
float Size2=1; // Half the width of the quad
in vec3 Position[];
in vec3 Normal[];
in vec3 offsets[];
out vec3 position;
out vec3 normal;
in float Time[];
out vec2 TexCoord;
out vec3 color;
const float width = 5;
void main()
{
position = Position[0];
normal = Normal[0];
color = offsets[0];
gl_Position = (vec4(-Size2*width,-Size2,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(0.0,0.0);
EmitVertex();
gl_Position = (vec4(Size2*width,-Size2,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(1.0,0.0);
EmitVertex();
gl_Position = (vec4(-Size2*width+(Time[0].x),10,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(0.0,.25);
EmitVertex();
gl_Position = (vec4(Size2*width+(Time[0].x),10,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(1.0,.25);
EmitVertex();
///////////////////////////////////////////////////
gl_Position = (vec4(-Size2*width+(Time[0].x)*2,15,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(0.0,.50);
EmitVertex();
gl_Position = (vec4(Size2*width+(Time[0].x)*2,15,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(1.0,.50);
EmitVertex();
///////////////////////////////////////////////////
gl_Position = (vec4(-Size2*width+(Time[0].x)*3,25,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(0.0,.75);
EmitVertex();
gl_Position = (vec4(Size2*width+(Time[0].x)*3,25,0.0,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(1.0,.75);
EmitVertex();
///////////////////////////////////////////////////
gl_Position = (vec4(-Size2*width,Size2*7,Time[0].x,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(0.0,1.0);
EmitVertex();
gl_Position = (vec4(Size2*width,Size2*7,Time[0].x,0.0) + gl_in[0].gl_Position);
TexCoord = vec2(1.0,1.0);
EmitVertex();
}
And my fragment shader (this is in a deferred engine; I've also tried it with forward rendering, and I don't think the performance hit is here):
#version 400
in vec2 TexCoord;
layout (binding=0) uniform sampler2D SpriteTex;
in vec3 color;
in vec3 normal;
in vec3 position;
layout( location = 0 ) out vec4 FragColor;
void main() {
vec4 texColor = texture(SpriteTex,TexCoord);
vec4 posColor = vec4(position.xyz,0);
gl_FragData[1] = posColor;
gl_FragData[2] = vec4(normal,1);
if(texColor.a<.5){
discard;
}
gl_FragData[0] = texColor;
}
What you want is a technique called instancing. The tutorial I've linked is fantastic for figuring out how to do it.
I would advise avoiding the geometry shader (geometry shaders generally don't scale well when their job is to amplify the number of vertices), and instead defining a buffer containing all the vertices needed to draw a single blade (or patch) of grass, then using instancing to redraw that object thousands of times.
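For the shader side, here is a minimal sketch of what an instanced grass vertex shader could look like. It assumes the single-blade mesh is a regular per-vertex attribute, the per-blade world position is a second attribute marked per-instance on the host with glVertexAttribDivisor(1, 1), and the whole field is drawn with one glDrawArraysInstanced call; the attribute names and the sway term are illustrative, not taken from your code:
#version 400
layout (location = 0) in vec3 bladeVertex;    // per-vertex: geometry of one blade (a small triangle strip)
layout (location = 1) in vec3 bladePosition;  // per-instance: world position of this blade (divisor = 1)
layout (location = 2) in vec2 bladeTexCoord;  // per-vertex: UV into the grass texture
uniform mat4 MVPmatrix;
uniform float time;
out vec2 TexCoord;
void main()
{
    vec3 p = bladeVertex;
    // Cheap sway: bend the upper vertices sideways, varied per blade by its position.
    p.x += sin(time + bladePosition.x + bladePosition.z) * p.y * 0.1;
    TexCoord = bladeTexCoord;
    gl_Position = MVPmatrix * vec4(p + bladePosition, 1.0);
}
On the host side this becomes a single glDrawArraysInstanced(GL_TRIANGLE_STRIP, 0, verticesPerBlade, bladeCount) call; the vertex shader then runs once per blade vertex instead of the geometry shader amplifying every point, which usually maps much better to the hardware.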
Related
Currently I am rendering mesh triangles like this:
// draw the same polygons again
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
shader.setVec3("objectColor", obj_color);
glDrawElements(GL_TRIANGLES, static_cast<unsigned int>(indices.size()), GL_UNSIGNED_INT, 0);
The problem with this code is that I am setting the object color inside the shader for the whole mesh.
What would be a good way to render a single mesh whose faces have different colors?
For now I only know how to set per-vertex colors and pass them to the fragment shader.
What are the most common ways to set individual face colors?
The only approach I can think of is duplicating the mesh vertices to avoid vertex color interpolation.
My current shader looks like this:
Vertex Shader:
#version 330 core
layout (location = 0) in vec3 aPos;
layout (location = 1) in vec3 aNormal;
out vec3 FragPos;
out vec3 Normal;
out vec3 LightPos;
uniform vec3 lightPos;
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;
void main()
{
gl_Position = projection * view * model * vec4(aPos, 1.0);
FragPos = vec3(view * model * vec4(aPos, 1.0));
Normal = mat3(transpose(inverse(view * model))) * aNormal;
LightPos = vec3(vec4(lightPos, 1.0)); // Transform world-space light position to view-space light position
// FragPos = vec3(model * vec4(aPos, 1.0));
//Normal = mat3(transpose(inverse(model))) * aNormal;
// gl_Position = projection * view * vec4(FragPos, 1.0);
}
Fragment Shader:
#version 330 core
out vec4 FragColor;
in vec3 FragPos;
in vec3 Normal;
in vec3 LightPos;
// extra in variable, since we need the light position in view space we calculate this in the vertex shader
uniform vec3 lightColor;
uniform vec3 objectColor;
uniform float f;
uniform float transparency;
void main()
{
//flat shading
// vec3 x_ = dFdx(FragPos);
// vec3 y_= dFdy(FragPos);
// vec3 normal_ = cross(x_, y_);
// vec3 norm_ = normalize(normal_);
// ambient
float ambientStrength = 0.75;
vec3 ambient = ambientStrength * lightColor;
// diffuse
vec3 norm = normalize(Normal);
vec3 lightDir = normalize(LightPos - FragPos);
float diff = max(dot(norm, lightDir), 0.0);//change "norm_" to "norm" avoid the performance warning and have unwelded view
vec3 diffuse = diff * lightColor;
// specular
float specularStrength = 0.01;
vec3 viewDir = normalize(-FragPos); // the viewer is always at (0,0,0) in view-space, so viewDir is (0,0,0) - Position => -Position
vec3 reflectDir = reflect(-lightDir, norm);
float spec = pow(max(dot(viewDir, reflectDir), 0.0), 32);
vec3 specular = specularStrength * spec * lightColor;
vec3 shading = (ambient + diffuse + specular)*objectColor;
//float f = 0.75;
float r_interpolated = shading[0] + f * (objectColor[0] - shading[0]);
float g_interpolated = shading[1] + f * (objectColor[1] - shading[1]);
float b_interpolated = shading[2] + f * (objectColor[2] - shading[2]);
vec3 result = vec3(r_interpolated,g_interpolated,b_interpolated);
FragColor = vec4(result, transparency);
}
You can use the flat Interpolation qualifier:
The value will not be interpolated. The value given to the fragment shader is the value from the Provoking Vertex for that primitive.
Vertex shader
// [...]
layout (location = 0) in vec3 aColor;
flat out vec3 vColor;
void main()
{
vColor = aColor;
// [...]
}
Fragment shader
// [...]
flat in vec3 vColor;
void main()
{
FragColor = vec4(vColor, 1.0);
}
With this implementation, the entire triangle primitive is rendered with one color. If you come up with a clever scheme for assigning the color attributes to the vertices, you can give every triangle its own color, e.g. 2 triangles with the indices 0-1-2 and 1-2-3: with the default last-vertex provoking convention, the color attribute of vertex 2 defines the color of the first triangle and the color attribute of vertex 3 defines the color of the second (the convention can be switched to the first vertex with glProvokingVertex).
An alternative way would be to create an array of colors, one per triangle primitive, and store this color array in a Shader Storage Buffer Object (which requires OpenGL 4.3 or ARB_shader_storage_buffer_object). Use gl_VertexID to address the color in the vertex shader; note that with indexed rendering gl_VertexID is the index read from the element buffer, so gl_VertexID / 3 only identifies the triangle for non-indexed draws or strictly sequential indices.
layout(std430, binding = 0) buffer primitiveColors
{
vec4 colors[];
};
void main()
{
vColor = colors[gl_VertexID / 3];
// [...]
}
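A closely related variant of the same idea, sketched below, is to skip the per-vertex lookup and index the same buffer with gl_PrimitiveID in the fragment shader; gl_PrimitiveID counts primitives within the draw call (as long as no geometry shader overrides it), so it also works with indexed glDrawElements rendering:
#version 430
layout(std430, binding = 0) buffer primitiveColors
{
    vec4 colors[];
};
out vec4 FragColor;
void main()
{
    // One color per triangle, addressed by the triangle's index within the draw call.
    FragColor = colors[gl_PrimitiveID];
}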
I'm trying to implement shadow mapping. I checked my depth texture on a quad, and it seems correct, but the shadow is not displaying. I checked my shadow vertex and fragment shaders, and I believe I have done the light-space transformation correctly.
Here is my code.
directional light source matrix setup:
//light source states
glm::vec3 Window::lightColor = glm::vec3(0.9f, 0.9f, 0.9f);
glm::vec3 Window::lightDir = glm::vec3(-1.f, -1.f, 0.f);
glm::mat4 Window::lightView = glm::lookAt(glm::vec3(0.f) - glm::normalize(lightDir) * 15.f, glm::vec3(0.0f), glm::vec3(0.f, 1.f, 0.f));
float Window::near_plane = 0.01f;
float Window::far_plane = 50.1f;
float camWidth = 10.f;
glm::mat4 Window::lightProj = glm::ortho(-10.f, 10.f, -10.f, 10.f, Window::near_plane, Window::far_plane);
glm::mat4 Window::lightProjView = lightProj * lightView;
shadow drawing logic:
void Renderer::drawWithShadow(Object* obj) {
//set shader uniforms
Shader* shader = shadowShader;
shader->bind();
shader->setUniformMat4("model", obj->model);
shader->setUniformMat4("projView", projView);
shader->setUniformVec3("viewPos", eyePos);
//need another projection matrix
shader->setUniformMat4("lightSpaceMatrix", shadowProjView);
glcheck(glActiveTexture(GL_TEXTURE0));
glcheck(glBindTexture(GL_TEXTURE_2D, textID));
//light uniforms
shader->setUniformVec3("directionalLightDir", directionalLightDir);
shader->setUniformVec3("lightColor", lightColor);
glcheck(glBindVertexArray(obj->vao));
for (auto i = 0; i < obj->meshList.size(); i++) {
Mesh* mesh = obj->meshList[i];
prepMaterial(mesh->material, shader);
glcheck(glDrawElements(GL_TRIANGLES, mesh->size, GL_UNSIGNED_INT, (GLvoid*)(sizeof(GLuint) * mesh->vertexOffset)));
}
}
Vertex and fragment shaders used to generate the shadow depth texture:
//vertex shader
#version 330 core
layout (location = 0) in vec3 position;
uniform mat4 projView;
uniform mat4 model;
void main() {
gl_Position = projView * model * vec4(position, 1.0);
}
//fragment shader
#version 330 core
void main()
{
}
Vertex and fragment shaders used to draw shadows with Phong lighting:
//vertex shader
#version 330 core
layout (location = 0) in vec3 position;
layout (location = 1) in vec3 normal;
layout (location = 2) in vec2 texCoord;
out VS_OUT {
vec4 fragPos;
vec3 normal;
vec2 texCoord;
vec4 fragPosLightSpace;
} vs_out;
uniform mat4 projView;
uniform mat4 model;
uniform mat4 lightSpaceMatrix;
void main()
{
vs_out.fragPos = model * vec4(position, 1.0);
vs_out.normal = transpose(inverse(mat3(model))) * normal;
vs_out.texCoord = texCoord;
vs_out.fragPosLightSpace = lightSpaceMatrix * vs_out.fragPos;
gl_Position = projView * vs_out.fragPos;
}
//fragment shader
#version 330 core
uniform vec3 viewPos; //just the eye pos
uniform vec3 diffuseFactor; //kd
uniform vec3 ambientColor; //ka
uniform vec3 specColor; //ks
uniform float specHighlight; //ns, the larger this value is, the more apparent the light dot on the surface
uniform float dissolve; //d
//lights
uniform vec3 directionalLightDir;
uniform vec3 pointLightPos;
uniform vec3 lightColor;
uniform sampler2D shadowMap;
//uniform sampler2DShadow shadowMap;
in VS_OUT {
vec4 fragPos;
vec3 normal;
vec2 texCoord;
vec4 fragPosLightSpace;
} fs_in;
out vec4 fragColor;
float ShadowCalculation(vec4 fragPosLightSpace)
{
vec3 projCoords = fragPosLightSpace.xyz / fragPosLightSpace.w;
vec2 shadowCoords;
shadowCoords.x = projCoords.x * 0.5 + 0.5;
shadowCoords.y = projCoords.y * 0.5 + 0.5;
float closestDepth = texture(shadowMap, shadowCoords).r;
float currentDepth = projCoords.z * 0.5 + 0.5;
float shadowValue = currentDepth + 0.00001 > closestDepth ? 1.0 : 0.0;
//if(currentDepth < 0.0)
//shadowValue = 0.0;
return shadowValue;
}
void main()
{
vec3 lightDir = normalize(-directionalLightDir);
vec3 norm = normalize(fs_in.normal);
//diffuse lighting
float diffStrength = max(dot(norm, lightDir), 0.0); // this calculates diffuse intensity based on angle
vec3 diffuse = lightColor * diffStrength * diffuseFactor;
//specular
vec3 viewDir = normalize(viewPos - fs_in.fragPos.xyz);
vec3 reflectDir = reflect(-lightDir, norm);
float spec = 0.0;
if(specHighlight > 0.0) { // if specHighlight is < 0, pow might produce undefined result if base is also 0
spec = pow(max(dot(viewDir, reflectDir), 0.0), specHighlight);
}
vec3 specular = spec * specColor * lightColor;
float shadow = ShadowCalculation(fs_in.fragPosLightSpace);
//float shadow = textureProj(shadowMap, fs_in.fragPosLightSpace);
//vec3 result = ambientColor * 0.05 * lightColor + (diffuse + specular)*(1-shadow);
vec3 result = (diffuse + specular)*(1.0 - shadow);
fragColor = vec4(result, 1);
}
With just Phong shading, the scene looks like this: [screenshot: Phong shading]
When the scene is viewed from the light source as depth values: [screenshot: depth texture on a quad]
When I finally render the scene with shadows, it is mostly black; I made sure the far plane covers all of the bunnies: [screenshot: shadow render]
I am implementing a deferred lighting system and have gotten all the way to having the position, diffuse, and normal textures bound in my fragment shader, where I am to calculate the lighting for each fragment.
#version 400 core
in vec3 fs_position;
in vec3 fs_color;
in vec4 fs_attenuation;
layout (location = 0) out vec4 outColor;
uniform sampler2D diffuseSampler;
uniform sampler2D positionSampler;
uniform sampler2D normalSampler;
const float cutOffFactor = 200;
const float reflectivity = 0.15;
const float shineDamper = 1;
void main(void){
vec2 frag = gl_PointCoord.xy;
frag.x = (frag.x+1)/2f;
frag.y = ((frag.y+1)/2f);
vec4 texDiffuse = texture(diffuseSampler,frag);
vec4 texPosition = texture(positionSampler,frag);
vec4 texNormal = texture(normalSampler,frag);
vec3 p = vec3(fs_position.xyz);
vec3 ePosition = texPosition.xyz;
ePosition = ePosition*200;
vec3 eNormal = texNormal.xyz;
vec3 unitNormal = normalize(eNormal);
outColor = vec4(texNormal.xyz,1.0);
}
That is literally all that my Fragment Shader contains.
The problem lies at "vec3 p = vec3(fs_position.xyz);".
When I remove it, the program renders a perfect normal map, but when I add it I get a blank screen in which I can rotate, and eventually a certain color flickers.
fs_position has nothing to do with color and is passed in from the geometry shader (all references are correct), yet it somehow causes a massive malfunction.
The same thing happens with the other in variables (fs_color and fs_attenuation).
What's being rendered is a non-blended quad with identical per-vertex properties that covers the viewport and renders to a color attachment that exists (as said, without that line everything works).
(Blending changes nothing; I will enable additive blending once I get a result worth continuing with.)
Any help would be appreciated; the engine and shaders have never acted this way for me before, and no errors are reported.
Extra code:
Vertex shader
#version 400 core
in vec3 position;
in vec3 color;
in vec4 attenuation;
out vec3 gs_position;
out vec3 gs_color;
out vec4 gs_attenuation;
uniform mat4 projectionMatrix;
uniform mat4 viewMatrix;
void main(void){
vec4 worldPosition = vec4(position,1.0);
vec4 viewPosition = viewMatrix * worldPosition;
gl_Position = projectionMatrix * viewPosition;
gs_position = viewPosition.xyz;
gs_color = color;
gs_attenuation = attenuation;
}
GeometryShader
#version 150
layout (points) in;
layout (triangle_strip,max_vertices = 4) out;
in vec3 gs_position[];
in vec3 gs_color[];
in vec4 gs_attenuation[];
out vec3 fs_position;
out vec3 fs_color;
out vec4 fs_attenuation;
void main(void){
gl_Position = vec4(-1,1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(-1,-1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(1,1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
gl_Position = vec4(1,-1,0,1);
fs_position = gs_position[0];
fs_color = gs_color[0];
fs_attenuation = gs_attenuation[0];
EmitVertex();
EndPrimitive();
}
Example of light values:
Position: -1, 0.5, -1
Color: 0, 0.5 ,0
Attenuation: 1, 0.1, 0.2, 0
As for the requested screenshots: without referencing an in variable I get something like this: [screenshot: normal map output]
With it I get a black screen, which is easy enough to visualise.
(Although when rotating the view matrix around the y-axis there is a certain point at which the quad turns green, I can't read values from it.)
I'm trying to calculate per-face normals in the geometry shader, using the following pipeline:
//VERTEX_SHADER
#version 330
layout(location = 0) in vec4 vertex;
out vec3 vert;
uniform mat4 projMatrix;
uniform mat4 mvMatrix;
void main()
{
vert = vertex.xyz;
gl_Position = projMatrix * mvMatrix * vertex;
}
//GEOMETRY_SHADER
#version 330
layout ( triangles ) in;
layout ( triangle_strip, max_vertices = 3 ) out;
out vec3 normal_out;
uniform mat3 normalMatrix;
void main()
{
vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
normal_out = normalMatrix * normalize(cross(A,B));
gl_Position = gl_in[0].gl_Position;
EmitVertex();
gl_Position = gl_in[1].gl_Position;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
EmitVertex();
EndPrimitive();
}
//FRAG_SHADER
#version 330
in vec3 normal_out;
in vec3 vert;
out vec4 fColor;
uniform vec3 lightPos;
void main()
{
highp vec3 L = normalize(lightPos - vert);
highp float NL = max(dot(normal_out, L), 0.0);
highp vec3 color = vec3(1, 1, 0.0);
fColor = vec4(color*NL, 1.0);
}
However, I end up with very weird-looking faces that keep flickering (I included a snapshot below). It occurred to me that this might be because I'm using 8 vertices to represent one cell (cube) instead of 24 vertices, but I'm not sure that is what is causing the problem.
[screenshot — left: using the light weighting 'NL', right: without]
After every call to EmitVertex, the contents of all output variables are made undefined. Therefore, if you want to output the same value to multiple vertices, you must copy it to the output every time.
Also, note that each shader stage's outputs provide inputs only to the next stage. So if you have a GS, and you want to pass a value from the VS to the FS, you must have the GS explicitly pass that value through.
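Applied to your shaders, that means the geometry shader must write normal_out again before every EmitVertex() and must also declare vert as an input array and forward it to the fragment shader. A minimal sketch (vert_out is a placeholder name, so the fragment shader's in vec3 vert would need to be renamed to match; the normal is still taken from gl_Position exactly as in your original shader):
#version 330
layout ( triangles ) in;
layout ( triangle_strip, max_vertices = 3 ) out;
in vec3 vert[];          // received from the vertex shader
out vec3 vert_out;       // forwarded to the fragment shader
out vec3 normal_out;
uniform mat3 normalMatrix;
void main()
{
    vec3 A = gl_in[2].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 B = gl_in[1].gl_Position.xyz - gl_in[0].gl_Position.xyz;
    vec3 N = normalMatrix * normalize(cross(A, B));
    for (int i = 0; i < 3; ++i)
    {
        gl_Position = gl_in[i].gl_Position;
        normal_out = N;        // re-written before every emitted vertex
        vert_out = vert[i];    // explicit pass-through of the VS output
        EmitVertex();
    }
    EndPrimitive();
}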
I have created an application in OpenGL that uses a Vertex Shader, Geometry Shader, and Fragment Shader.
I have a uniform variable, eyePositionWorld that I would like to use both in the Geometry Shader and the Fragment Shader.
(I am rendering the position of the vertices relative to eyePositionWorld as the color.)
Vertex Shader
#version 430
in vec4 vertexPositionModel;
in vec3 vertexColor;
in vec3 vertexNormalModel;
in mat4 modelMatrix;
uniform mat4 viewMatrix;//World To View
uniform mat4 projectionMatrix;//View to Projection
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
out fData geomData;
void main()
{
gl_Position = projectionMatrix * viewMatrix * modelMatrix * vertexPositionModel;
geomData.fragColor = vertexColor;
geomData.fragPositionWorld = (modelMatrix * vertexPositionModel).xyz;
geomData.fragNormalWorld = (modelMatrix * vec4(vertexNormalModel, 0.0)).xyz;
}
Geometry Shader
#version 430
layout(triangles_adjacency) in;
layout(triangle_strip, max_vertices=3) out;
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
uniform vec3 eyePositionWorldGeomShader;
in fData geomData[];
out fData fragData;
void main() {
gl_Position = gl_in[0].gl_Position;
fragData = geomData[0];
fragData.fragColor = gl_in[0].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
gl_Position = gl_in[2].gl_Position;
fragData = geomData[2];
fragData.fragColor = gl_in[2].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
gl_Position = gl_in[4].gl_Position;
fragData = geomData[4];
fragData.fragColor = gl_in[4].gl_Position.xyz - eyePositionWorldGeomShader;
EmitVertex();
EndPrimitive();
}
Fragment Shader
#version 430
struct fData
{
vec3 fragColor;
vec3 fragPositionWorld;
vec3 fragNormalWorld;
};
in fData fragData;
uniform vec4 ambientLight;
uniform vec3 lightPositionWorld;
uniform vec3 eyePositionWorld;
uniform bool isLighted;
out vec4 color;
void main()
{
if (!isLighted)
{
color = vec4(fragData.fragColor, 1.0);
}
else
{
vec3 lightVectorWorld = normalize(lightPositionWorld - fragData.fragPositionWorld);
float brightness = clamp(dot(lightVectorWorld, normalize(fragData.fragNormalWorld)), 0.0, 1.0);
vec4 diffuseLight = vec4(brightness, brightness, brightness, 1.0);
vec3 reflectedLightVectorWorld = reflect(-lightVectorWorld, fragData.fragNormalWorld);
vec3 eyeVectorWorld = normalize(eyePositionWorld - fragData.fragPositionWorld);
float specularity = pow(clamp(dot(reflectedLightVectorWorld, eyeVectorWorld), 0.0, 1.0), 40) * 0.5;
vec4 specularLight = vec4(specularity, specularity, specularity, 1.0);
//Maximum Distance of All Lights
float maxDist = 55.0;
float attenuation = clamp((maxDist - length(lightPositionWorld - fragData.fragPositionWorld)) / maxDist, 0.0, 1.0);
color = (ambientLight + (diffuseLight + specularLight) * attenuation) * vec4(fragData.fragColor, 1.0);
}
}
C++ Code (the m_eyePositionUL and m_eyePositionGeomShaderUL are both just loaded with glGetUniformLocation)
glUniform3fv(m_eyePositionUL, 1, &m_camera.getPosition()[0]);
glUniform3fv(m_eyePositionGeomShaderUL, 1, &m_camera.getPosition()[0]);
How can I upload just one uniform to OpenGL and use it in both the Geometry Shader and the Fragment Shader?
It's a bit surprising, but OpenGL makes it easy: all you have to do is use the same uniform name in both shaders.
Then just upload it once under that uniform location.
Replace uniform vec3 eyePositionWorldGeomShader; with uniform vec3 eyePositionWorld; in your Geometry Shader and keep the uniform name the same in the Fragment Shader.
Then just don't upload the other Uniform so your C++ code will simply be
glUniform3fv(m_eyePositionUL, 1, &m_camera.getPosition()[0]);