Multiple Definitions: Stumped - c++

I have a simple program using FreeGLUT, OpenGL, and some nice simplex noise functions from Eliot Eshelman. The goal is to display 2D slices of some 3D simplex noise, which I've previously managed with SDL.
My problem is: I'm getting multiple definition errors that I can't seem to resolve.
I've searched around on Google and these forums for a while and learned about quite a few things that can cause these errors, but I can't seem to find the cause of mine. I've been stumped for many hours.
Here are the relevant sources and the error output.
Note that there may be more bugs beyond the errors I'm getting, but I haven't been able to get far enough to find them yet.
Output:
-------------- Build: Release in OpenGL Test (compiler: GNU GCC Compiler)---------------
mingw32-g++.exe -Wall -O2 -std=c++11 -IC:\freeglut\include -c "C:\OpenGL Test\OpenGL Test\main.cpp" -o obj\Release\main.o
mingw32-g++.exe -LC:\freeglut\lib -o "bin\Release\OpenGL Test.exe" obj\Release\LUtil.o obj\Release\main.o obj\Release\simplexnoise.o -s -lmingw32 -lOpenGL32 -lglu32 -lfreeglut -mwindows
obj\Release\main.o:main.cpp:(.bss+0x0): multiple definition of `textureName'
obj\Release\LUtil.o:LUtil.cpp:(.bss+0x0): first defined here
obj\Release\main.o:main.cpp:(.bss+0x20): multiple definition of `noiseImage'
obj\Release\LUtil.o:LUtil.cpp:(.bss+0x20): first defined here
obj\Release\main.o:main.cpp:(.bss+0x12c020): multiple definition of `zOffset'
obj\Release\LUtil.o:LUtil.cpp:(.bss+0x12c020): first defined here
collect2.exe: error: ld returned 1 exit status
Process terminated with status 1 (0 minute(s), 0 second(s))
0 error(s), 0 warning(s) (0 minute(s), 0 second(s))
LUtil.h:
/*This source code copyrighted by Lazy Foo' Productions (2004-2013)
and may not be redistributed without written permission.*/
//Version: 001
#ifndef LUTIL_H
#define LUTIL_H
#include "LOpenGL.h"
#include "simplexnoise.h"
//Screen Constants
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_FPS = 60;
float zOffset = 0;
float noiseImage[640][480];
GLuint textureName;
bool initGL();
/*
Pre Condition:
-A valid OpenGL context
Post Condition:
-Initializes matrices and clear color
-Reports to console if there was an OpenGL error
-Returns false if there was an error in initialization
Side Effects:
-Projection matrix is set to identity matrix
-Modelview matrix is set to identity matrix
-Matrix mode is set to modelview
-Clear color is set to black
*/
void update();
/*
Pre Condition:
-None
Post Condition:
-Does per frame logic
Side Effects:
-None
*/
void render();
/*
Pre Condition:
-A valid OpenGL context
-Active modelview matrix
Post Condition:
-Renders the scene
Side Effects:
-Clears the color buffer
-Swaps the front/back buffer
*/
#endif
LUtil.cpp:
/*This source code copyrighted by Lazy Foo' Productions (2004-2013)
and may not be redistributed without written permission.*/
//Version: 001
#include "LUtil.h"
bool initGL()
{
//Initialize Projection Matrix
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
//Initialize Modelview Matrix
glMatrixMode( GL_MODELVIEW );
glLoadIdentity();
//Initialize clear color
glClearColor( 0.f, 0.f, 0.f, 1.f );
//Check for error
GLenum error = glGetError();
if( error != GL_NO_ERROR )
{
printf( "Error initializing OpenGL! %s\n", gluErrorString( error ) );
return false;
}
glGenTextures(1, &textureName);
glBindTexture(GL_TEXTURE_2D, textureName);
return true;
}
void update()
{
for(int y = 0; y < SCREEN_HEIGHT; y++)
{
for(int x = 0; x < SCREEN_WIDTH; x++)
{
noiseImage[x][y] = raw_noise_3d(x, y, zOffset);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, SCREEN_WIDTH, SCREEN_HEIGHT, 0, GL_RGB, GL_FLOAT, &noiseImage[0][0]);
}
}
}
void render()
{
//Clear color buffer
glClear( GL_COLOR_BUFFER_BIT );
glColor4f(1, 0.5f, 0, 1);
//Render quad
glBegin( GL_QUADS );
glTexCoord2f( -1.f, -1.f );
glTexCoord2f( -1.f, 1.f );
glTexCoord2f( 1.f, -0.f );
glTexCoord2f( 1.f, 1.f );
glEnd();
//Update screen
glutSwapBuffers();
}
main.cpp:
/*This source code copyrighted by Lazy Foo' Productions (2004-2013)
and may not be redistributed without written permission.*/
//Version: 001
#include "LUtil.h"
void runMainLoop( int val );
/*
Pre Condition:
-Initialized freeGLUT
Post Condition:
-Calls the main loop functions and sets itself to be called back in 1000 / SCREEN_FPS milliseconds
Side Effects:
-Sets glutTimerFunc
*/
int main( int argc, char* args[] )
{
//Initialize FreeGLUT
glutInit( &argc, args );
//Create OpenGL 2.1 context
glutInitContextVersion( 4, 4 );
//Create Double Buffered Window
glutInitDisplayMode( GLUT_DOUBLE );
glutInitWindowSize( SCREEN_WIDTH, SCREEN_HEIGHT );
glutCreateWindow( "OpenGL" );
//Do post window/context creation initialization
if( !initGL() )
{
printf( "Unable to initialize graphics library!\n" );
return 1;
}
//Set rendering function
glutDisplayFunc( render );
//Set main loop
glutTimerFunc( 1000 / SCREEN_FPS, runMainLoop, 0 );
//Start GLUT main loop
glutMainLoop();
return 0;
}
void runMainLoop( int val )
{
//Frame logic
update();
render();
//Run frame one more time
glutTimerFunc( 1000 / SCREEN_FPS, runMainLoop, val );
}
As far as I can tell, there are no multiple definitions, but I could be wrong.
Once again, I'm sorry if I'm "That guy" with this question.

Move the definitions of your global variables to LUtil.cpp:
float zOffset = 0;
float noiseImage[640][480];
GLuint textureName;
Then declare the variables in LUtil.h:
extern float zOffset;
extern float noiseImage[640][480];
extern GLuint textureName;
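The header was being compiled into both main.o and LUtil.o, so each object file contained its own copy of the three variables and the linker refused to merge them. With the change above, the two files might look roughly like this (a sketch; the function declarations and comments stay as they were):
//LUtil.h -- declarations only, no storage allocated here
#ifndef LUTIL_H
#define LUTIL_H
#include "LOpenGL.h"
#include "simplexnoise.h"
//const ints have internal linkage in C++, so they are fine in a header
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_FPS = 60;
//declarations: every translation unit that includes this header sees the same variables
extern float zOffset;
extern float noiseImage[640][480];
extern GLuint textureName;
#endif
//LUtil.cpp -- the one and only definition of each variable
#include "LUtil.h"
float zOffset = 0;
float noiseImage[640][480];
GLuint textureName;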

Related

OpenGL: can't set color to solid with glColor3f(1.0f, 0.0f, 0.0f), very opaque red

In the image the red appears very dull, even though I set glColor3f(1.0f, 0.0f, 0.0f);
//command compiler g++ console tested with Visual Studio Code
//g++ GL01Hello.cpp -o GL01Hello.exe -L"C:/MinGW/freeglut/lib" -lglu32 -lopengl32 -lfreeglut -I"C:\MinGW\freeglut\include\GL"
/*
* GL01Hello.cpp:With Load Background Image and Poligon Test OpenGL/GLUT C/C++ Setup
* Tested Visual Studio Code with MinGW
* To compile with -lfreeglut -lglu32 -lopengl32 and
*/
#include <windows.h> // for MS Windows
#include <stdio.h> /* printf, scanf, puts, NULL */
#include <iostream>
#include <stdlib.h> /* srand, rand */
#include <ctime>
#include <freeglut.h> // GLUT, include glu.h and gl.h
using namespace std;
float spin = 0.0;
GLuint texture = 0;
int w1 = 0;
int h1 = 0;
// for random color primitive polygon
//static GLubyte redc,greenc,bluec;
bool prim_polygonmode = false;
// glut_load_image
GLuint LoadTexture( const char * filename )
{
GLuint texture;
int width, height;
unsigned char * data;
FILE * file;
file = fopen( filename, "rb" );
if(!file)
std::cout<<"File not Found"<<std::endl;
if ( file == NULL ) return 0;
width = 1360;
height = 768;
data = (unsigned char *)malloc( width * height * 3 );
//int size = fseek(file,);
fread( data, width * height * 3, 1, file );
fclose( file );
for(int i = 0; i < width * height ; ++i)
{
int index = i*3;
unsigned char B,R;
B = data[index];
R = data[index+2];
data[index] = R;
data[index+2] = B;
}
glGenTextures( 1, &texture );
glBindTexture( GL_TEXTURE_2D, texture );
glPixelStorei( GL_UNPACK_ALIGNMENT, 1 );//Necessary for correct elements value 4 default
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE,GL_MODULATE );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,GL_LINEAR_MIPMAP_NEAREST );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,GL_LINEAR );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,GL_REPEAT );
gluBuild2DMipmaps( GL_TEXTURE_2D, 3, width, height,GL_RGB, GL_UNSIGNED_BYTE, data );
free( data );
return texture;
}
/* Initialize OpenGL Graphics, in this case just for colors */
void initGL() {
// Set "clearing" or background color
glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Black and opaque
//randomnumber color by ctime library
srand(time(NULL));
//redc = rand()%255;
//greenc = rand()%255;
//bluec = rand()%255;
}
/* Called back when there is no other event to be handled */
void idle() {
spin = spin + 0.075;
if (spin > 360.0)
spin = 0;
glutPostRedisplay(); // Post a re-paint request to activate display()
}
/* Handler for window re-size event. Called back when the window first appears and
whenever the window is re-sized with its new width and height */
void reshape(GLsizei width, GLsizei height) { // GLsizei for non-negative integer
// Compute aspect ratio of the new window
w1 = width;
h1 = height;
if (height == 0) height = 1; // To prevent divide by 0
GLfloat aspect = (GLfloat)width / (GLfloat)height;
// Set the viewport to cover the new window
glViewport(0, 0, width, height);
// Set the aspect ratio of the clipping area to match the viewport
glMatrixMode(GL_PROJECTION); // To operate on the Projection matrix
glLoadIdentity();
if (width >= height) {
// aspect >= 1, set the height from -1 to 1, with larger width
gluOrtho2D(-1.0 * aspect, 1.0 * aspect, -1.0, 1.0);
} else {
// aspect < 1, set the width to -1 to 1, with larger height
gluOrtho2D(-1.0, 1.0, -1.0 / aspect, 1.0 / aspect);
}
}
void orthogonalStart()
{
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glLoadIdentity();
gluOrtho2D(-w1/2, w1/2, -h1/2, h1/2);
glMatrixMode(GL_MODELVIEW);
}
void orthogonalEnd()
{
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
}
void background()
{
glBindTexture( GL_TEXTURE_2D, texture );
orthogonalStart();
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glPolygonOffset(1,1);
// texture width/height
const int iw = 1360;
const int ih = 768;
glPushMatrix();
glTranslatef( -iw/2, -ih/2, 0 );
glBegin(GL_QUADS);
glColor3f(1.0f, 1.0f, 1.0f); // always default color white stars, if no this line will random color same of polygon
glTexCoord2i(0,0); glVertex2i(0, 0);
glTexCoord2i(1,0); glVertex2i(iw, 0);
glTexCoord2i(1,1); glVertex2i(iw, ih);
glTexCoord2i(0,1); glVertex2i(0, ih);
glEnd();
glPopMatrix();
orthogonalEnd();
}
void display() {
glClearColor(0.0f, 0.0f, 0.0f, 1.0f); // Set background color to black and opaque
glClear(GL_COLOR_BUFFER_BIT);// Clear the color buffer (background
glEnable( GL_TEXTURE_2D );
background();
gluLookAt (0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0);
// A SQUARE PARAMETERS
if (prim_polygonmode) { // draw polygon mode lines
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
} else {
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glPolygonOffset(1,1);
}
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glPushMatrix();
glRotatef(spin , 0., 0., 1.);
glTranslatef(50.0, 50.0, 0);
glTranslatef(-50.0, -50.0, 0);
glBegin(GL_QUADS); // Each set of 4 vertices form a quad
//glColor3ub(redc, greenc, bluec); // Random red green blue value
glColor3f(1.0f, 0.0f, 0.0f); // Random red green blue value
glVertex2f(-0.5f, -0.5f); // x, y default 0.5f values
glVertex2f( 0.5f, -0.5f);
glVertex2f( 0.5f, 0.5f);
glVertex2f(-0.5f, 0.5f);
glEnd();
//angle += 5.0f;
glPopMatrix();
// glFlush(); // Render now
glutSwapBuffers(); // Double buffered - swap the front and back buffers
}
/* Callback handler for special-key event */
void specialKeys(int key, int x, int y) {
switch (key) {
case GLUT_KEY_F1: // F1: Toggle wireframe and solid polygon
prim_polygonmode = !prim_polygonmode; // Toggle state
break;
}
}
/* Main function: GLUT runs as a console application starting at main() */
int main(int argc, char** argv) {
glutInit(&argc, argv); // Initialize GLUT
glutInitDisplayMode(GLUT_DOUBLE); // Enable double buffered mode
glutInitWindowSize(1360, 768); // Set the window's initial width & height
glutInitWindowPosition(0, 0);
// Position the window's initial top-left corner
glutCreateWindow("OpenGL Setup Test"); // Create a window with the given title
glutSpecialFunc(specialKeys); // Register callback handler for special-key event
glutDisplayFunc(display); // Register display callback handler for window re-paint
glutReshapeFunc(reshape);
glutIdleFunc(idle);
// GLuint texture;
texture = LoadTexture( "stars.bmp" );
initGL();
glutMainLoop();// Enter the event-processing loop
//Free our texture
glDeleteTextures( 1, &texture );
return 0;
}
This code sets a background image and runs a small animation of a square.
I don't know why I can't get solid colors: the wireframe square comes out as a very faint red line, but I need a bright red. Maybe some filter or buffer is causing that?
If possible, please help me.
OpenGL is a state engine. Once a state is set, it is persistent until it is changed again.
This means if 2 dimensional texturing is enabled, all the following geometry is "textured".
Note, when glVertex2f is called then the current texture coordinate is associated with the vertex coordinate. If you don't explicitly set a texture coordinate, then the last texture coordinate which was set is still the current texture coordinate and will be associated to the vertex coordinate. This may cause a random like behavior.
If texturing is enabled, then by default the color of the texel is multiplied by the current color, because by default the texture environment mode (GL_TEXTURE_ENV_MODE) is GL_MODULATE. See glTexEnv.
That all means:
If you want to draw a geometry with a texture then enable texturing and set a "white" color:
glEnable(GL_TEXTURE_2D)
glColor3f(1.0f, 1.0f, 1.0f);
background();
If you want to draw a uniform colored geometry, then set the color and disable texturing:
glDisable(GL_TEXTURE_2D);
glColor3f(1.0f, 0.0f, 0.0f);
glBegin(GL_QUADS);
// [...]
glEnd();
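Putting it together, the relevant part of display() could look roughly like this (a sketch based on the snippets above; it reuses the question's background() function):
void display() {
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    // Textured background: texturing on, color set to white so that
    // GL_MODULATE leaves the texel colors unchanged
    glEnable(GL_TEXTURE_2D);
    glColor3f(1.0f, 1.0f, 1.0f);
    background();
    // Solid red square: texturing off, then the desired color
    glDisable(GL_TEXTURE_2D);
    glColor3f(1.0f, 0.0f, 0.0f);
    glBegin(GL_QUADS);
    glVertex2f(-0.5f, -0.5f);
    glVertex2f( 0.5f, -0.5f);
    glVertex2f( 0.5f,  0.5f);
    glVertex2f(-0.5f,  0.5f);
    glEnd();
    glutSwapBuffers();
}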

Using SDL2 and OpenGL to compile a simple triangle

This question has been posed all over the internet, yet every code example I try seems to produce the same result as my own attempts at generating a simple triangle with OpenGL and SDL2. Before switching to a different windowing API, my curiosity won't let me leave this dilemma unresolved.
What I want to figure out is whether the source of the problem is my system (given that all the code examples I've found look similar, and the issue shows up on two different computers) or whether I'm just doing something wrong myself.
My PC has a GTX 1050m running Ubuntu 18.04; the drivers are the official NVIDIA drivers from the Ubuntu PPA, and the card is capable of OpenGL 4.6.
This is display.h, my header file for initializing SDL2 and GLEW:
#ifndef DISPLAY_H
#define DISPLAY_H
#include<SDL2/SDL.h>
#include<string>
class Display
{
public:
//initializes SDL2 and Glew
Display( int width, int height, const std::string& title );
//Clears the screen and creates a triangle
void Clear();
//Calls SDL_GL_SwapWindow()
void Update();
//Determines whether or not SDL_QUIT Event is called
bool IsClosed();
//Destroys application
virtual ~Display();
protected:
private:
Display( const Display& other );
void operator=( const Display& other );
SDL_Window* m_window;
SDL_GLContext m_glContext;
bool m_isClosed;
};
#endif //DISPLAY_H
This is display.cpp
//Constructor and Destructor container
#include"display.h"
//Glew container
#include<GL/glew.h>
//Standard I/O lib
#include<cstdio>
//Constructor
Display::Display( int width, int height, const std::string& title )
{
//SDL2 initialization and error check
if( SDL_Init( SDL_INIT_VIDEO ) != 0 ){
printf( "Unable to initialize SDL: %s\n", SDL_GetError() );
SDL_ClearError();
}
SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 16);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK,
SDL_GL_CONTEXT_PROFILE_COMPATIBILITY );
//SDL Create window stored in variable m_window
m_window = SDL_CreateWindow( title.c_str(),
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
width,
height,
SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE );
// Error checking window
if( !m_window ) {
fprintf( stderr, "Could not create window: %s\n", SDL_GetError() );
return;
}
//Creates OpenGL context
m_glContext = SDL_GL_CreateContext( m_window );
if ( !m_glContext ){
fprintf( stderr, "Couldn't create context: %s\n", SDL_GetError() );
return;
}
//Makes the open window the current context
SDL_GL_MakeCurrent( m_window, m_glContext );
//Checks How OpenGL handled setting
int rs, gs,bs;
SDL_GL_GetAttribute( SDL_GL_RED_SIZE, &rs );
SDL_GL_GetAttribute( SDL_GL_GREEN_SIZE, &gs );
SDL_GL_GetAttribute( SDL_GL_BLUE_SIZE, &bs );
printf( "Red size: %d, Green size: %d, Blue size: %d\n", rs, gs, bs );
//To be implemented
//glewExperimental = GL_TRUE;
//initialization of glew
GLenum status = glewInit();
if(GLEW_OK != status ){
printf( "GLEW Error: ", glewGetErrorString( status ) );
}
printf( "Status: Using GLEW: %s\n", glewGetString( GLEW_VERSION ) );
}
//Destructor
Display::~Display()
{
//Deletes GL context
SDL_GL_DeleteContext( m_glContext );
//Close and destroy the window
SDL_DestroyWindow( m_window );
//Clean up
SDL_Quit();
}
void Display::Clear()
{
//Clears the screen
glClear( GL_COLOR_BUFFER_BIT );
//Creating a triangle
glBegin( GL_POLYGON );
glColor3f( 1, 0, 0 );
glVertex3f( -0.6, -0.75, 0.5 );
glColor3f( 0, 1, 0 );
glVertex3f( 0.6, -0.75, 0 );
glColor3f( 0, 0, 1 );
glVertex3f( -0, -0.75, 0 );
glEnd();
glFlush();
}
//Closes the window on SDL_QUIT event
bool Display::IsClosed()
{
return m_isClosed;
}
void Display::Update()
{
SDL_GL_SwapWindow( m_window );
SDL_Event e;
while( SDL_PollEvent( &e ) )
{
if( e.type == SDL_QUIT )
m_isClosed = true;
}
}
And finally, this is my simple main.cpp:
#include<GL/glew.h>
#include<iostream>
#include"display.h"
int main( int argc, char* argv[] )
{
Display display( 640, 480, "flagShip" );
while( !display.IsClosed() )
{
display.Clear();
display.Update();
}
return 0;
}
Handy Makefile
CC=g++
OBJS= main.cpp
LINKER_FLAGS= -lSDL2 -lGL -lGLEW
COMPILER_FLAGS= -w -pedantic
OBJ_NAME=flagShip
$(OBJ_NAME):main.o display.o
$(CC) $(COMPILER_FLAGS) -o $(OBJ_NAME) main.o display.o $(LINKER_FLAGS)
main.o:$(OBJS) display.h
$(CC) $(COMPILER_FLAGS) -c $(OBJS)
display.o: display.cpp display.h
$(CC) $(COMPILER_FLAGS) -c display.cpp
You do not use any view matrix or projection matrix. This means you have to set up the vertex coordinates of the triangle in clip space, respectively in normalized device space.
The normalized device space is a cube where the left, lower, near coordinate is (-1, -1, -1) and the right, upper, far coordinate is (1, 1, 1):
The projection of the x-axis to the viewport, points from the left to the right and the projection of the y-axis points from the bottom to the top.
Since all the y-coordinates of your triangle are equal, the triangle is perpendicular to the viewport:
glVertex3f( -0.6, -0.75, 0.5 );
...
glVertex3f( 0.6, -0.75, 0 );
...
glVertex3f( -0, -0.75, 0 );
Swap the y- and z-component of the vertex coordinates of the triangle, to solve the issue:
glVertex3f( -0.6, 0.5, -0.75 );
...
glVertex3f( 0.6, 0.0, -0.75 );
...
glVertex3f( -0.0, 0.0, -0.75 );
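For reference, Display::Clear() with the corrected coordinates might look like this (a sketch; nothing else in the question's code needs to change for the triangle to appear):
void Display::Clear()
{
    glClear( GL_COLOR_BUFFER_BIT );
    // Coordinates are in normalized device space; the vertices now
    // differ in x and y, so the triangle has a visible area facing the viewport
    glBegin( GL_POLYGON );
    glColor3f( 1, 0, 0 );
    glVertex3f( -0.6f, 0.5f, -0.75f );
    glColor3f( 0, 1, 0 );
    glVertex3f( 0.6f, 0.0f, -0.75f );
    glColor3f( 0, 0, 1 );
    glVertex3f( -0.0f, 0.0f, -0.75f );
    glEnd();
    glFlush();
}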
Take a piece of graph paper, then draw out the vertex coordinates of your "triangle". Do you notice something?
Hint 1: the area of the triangle you draw is zero.
Hint 2: Look at the y-value of the last glVertex call.

OpenGL drawing lines on non square screen

I'm using OpenGL in C++ to draw '+' symbols. My screen has a resolution of 1920x1080, but unfortunately my method of drawing (glBegin(GL_LINES)) only works within a 1080x1080 rectangle; for x > screen_height everything gets cut off. However, I can still place text (using GLUT) on the full 1920x1080 surface. Could you help me find which setting is at fault?
I attached a code snippet. This may not compile, as I only included what is used for the drawing of the '+' symbol.
Much appreciated. Thank you in advance.
// Libraries needed for OpenGL and strings
#include <GL/glut.h>
#include <GLFW/glfw3.h>
// header file required for this cpp file
#include "window2.hxx"
int main(int argc, char *argv[])
{
// Start GLFW as main OpenGL frontend
GLFWwindow* window;
if( !glfwInit() )
{
fprintf( stderr, "Failed to initialize GLFW\n" );
exit( EXIT_FAILURE );
}
// Later upstream GLut is used for text rendering: thus also initialize GLUT (freeglut3)
glutInit(&argc, argv);
// check the resolution of the monitor and pass it to the create window function
const GLFWvidmode* mode = glfwGetVideoMode(glfwGetPrimaryMonitor());
screen_width = mode->width;
screen_height = mode->height;
window = glfwCreateWindow(screen_width , screen_height, "LearnOpenGL", glfwGetPrimaryMonitor(), NULL );
if (!window)
{
fprintf( stderr, "Failed to open GLFW window\n" );
glfwTerminate();
exit( EXIT_FAILURE );
}
glfwMakeContextCurrent(window);
glfwSwapInterval( 1 );
// set up view
glViewport( 0, 0, screen_height, screen_width );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
// see https://www.opengl.org/sdk/docs/man2/xhtml/glOrtho.xml
glOrtho(0.0,screen_height,0.0,screen_width,0.0,1.0); // this creates a canvas for 2D drawing on
x_1 = 500;
y_1 = 100;
while( !glfwWindowShouldClose(window) ) {
// Draw gears
render_loop();
// Swap buffers
glfwSwapBuffers(window);
glfwPollEvents();
} // glfw while loop
}
//OpenGL Draw function
void render_loop()
{
// white background
glClearColor ( 1.0f, 1.0f, 1.0f, 1.0f );
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glPointSize(10);
glLineWidth(1);
// push matrix
glPushMatrix();
glColor3f(0.0, 0.0, 1.0);
glBegin(GL_LINES);
glVertex2i(x_1-10,y_1);
glVertex2i(x_1+10,y_1);
glVertex2i(x_1,y_1-10);
glVertex2i(x_1,y_1+10);
glEnd();
// pop matrix
glPopMatrix();
}
I think you've got your dimensions the wrong way round:
// set up view
glViewport( 0, 0, screen_height, screen_width );
...
glOrtho(0.0,screen_height,0.0,screen_width,0.0,1.0);
Try
glViewport( 0, 0, screen_width, screen_height);
...
glOrtho(0.0,screen_width,0.0,screen_height,0.0,1.0);
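If you'd rather not rely on the hard-coded values at all, GLFW can report the actual framebuffer size after the window is created; a sketch (glfwGetFramebufferSize is standard GLFW 3, the rest follows the question's setup):
int fb_width = 0, fb_height = 0;
glfwGetFramebufferSize( window, &fb_width, &fb_height );
// viewport and projection now stay consistent with the real window size
glViewport( 0, 0, fb_width, fb_height );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
glOrtho( 0.0, fb_width, 0.0, fb_height, 0.0, 1.0 );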

Is It More Efficient to Use GL_TRIANGLE_STRIP or Indexed GL_TRIANGLES to Draw a Dynamic Number of Quads

I'm developing a simple sprite-based 2D game in C++ that uses OpenGL for hardware-accelerated rendering, and SDL for window management and user input handling. Since it's a 2D game, I'm only ever going to need to draw quads, but because the number of sprites is dynamic, I can never rely on there being a constant number of quads. Consequently, I need to rebuffer all of the vertex data via my VBO each frame (since there may be more or fewer quads than there were in the last frame, and thus the buffer may be a different size).
The prototype program I have so far creates a window and allows the user to add and remove quads in a diagonal row by using the up and down arrow keys. Right now the quads I'm drawing are simple, untextured white squares. Here is the code I'm working with (compiles and works correctly under OS X 10.6.8 and Ubuntu 12.04 with OpenGL 2.1):
#if defined(__APPLE__)
#include <OpenGL/OpenGL.h>
#endif
#if defined(__linux__)
#define GL_GLEXT_PROTOTYPES
#include <GL/glx.h>
#endif
#include <GL/gl.h>
#include <SDL.h>
#include <iostream>
#include <vector>
#include <string>
struct Vertex
{
//vertex coordinates
GLint x;
GLint y;
};
//Constants
const int SCREEN_WIDTH = 1024;
const int SCREEN_HEIGHT = 768;
const int FPS = 60; //our framerate
//Globals
SDL_Surface *screen; //the screen
std::vector<Vertex> vertices; //the actual vertices for the quads
std::vector<GLint> startingElements; //the index where the 4 vertices of each quad begin in the 'vertices' vector
std::vector<GLint> counts; //the number of vertices for each quad
GLuint VBO = 0; //the handle to the vertex buffer
void createVertex(GLint x, GLint y)
{
Vertex vertex;
vertex.x = x;
vertex.y = y;
vertices.push_back(vertex);
}
//creates a quad at position x,y, with a width of w and a height of h (in pixels)
void createQuad(GLint x, GLint y, GLint w, GLint h)
{
//Since we're drawing the quads using GL_TRIANGLE_STRIP, the vertex drawing
//order is from top to bottom, left to right, like so:
//
// 1-----3
// | |
// | |
// 2-----4
createVertex(x, y); //top-left vertex
createVertex(x, y+h); //bottom-left vertex
createVertex(x+w, y); //top-right vertex
createVertex(x+w, y+h); //bottom-right vertex
counts.push_back(4); //each quad will always have exactly 4 vertices
startingElements.push_back(startingElements.size()*4);
std::cout << "Number of Quads: " << counts.size() << std::endl; //print out the current number of quads
}
//removes the most recently created quad
void removeQuad()
{
if (counts.size() > 0) //we don't want to remove a quad if there aren't any to remove
{
for (int i=0; i<4; i++)
{
vertices.pop_back();
}
startingElements.pop_back();
counts.pop_back();
std::cout << "Number of Quads: " << counts.size() << std::endl;
}
else
{
std::cout << "Sorry, you can't remove a quad if there are no quads to remove!" << std::endl;
}
}
void init()
{
//initialize SDL
SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER);
screen = SDL_SetVideoMode(SCREEN_WIDTH, SCREEN_HEIGHT, 0, SDL_OPENGL);
#if defined(__APPLE__)
//Enable vsync so that we don't get tearing when rendering
GLint swapInterval = 1;
CGLSetParameter(CGLGetCurrentContext(), kCGLCPSwapInterval, &swapInterval);
#endif
//Disable depth testing, lighting, and dithering, since we're going to be doing 2D rendering only
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
glDisable(GL_DITHER);
glPushAttrib(GL_DEPTH_BUFFER_BIT | GL_LIGHTING_BIT);
//Set the projection matrix
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, SCREEN_WIDTH, SCREEN_HEIGHT, 0, -1.0, 1.0);
//Set the modelview matrix
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
//Create VBO
glGenBuffers(1, &VBO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
}
void gameLoop()
{
int frameDuration = 1000/FPS; //the set duration (in milliseconds) of a single frame
int currentTicks;
int pastTicks = SDL_GetTicks();
bool done = false;
SDL_Event event;
while(!done)
{
//handle user input
while(SDL_PollEvent(&event))
{
switch(event.type)
{
case SDL_KEYDOWN:
switch (event.key.keysym.sym)
{
case SDLK_UP: //create a new quad every time the up arrow key is pressed
createQuad(64*counts.size(), 64*counts.size(), 64, 64);
break;
case SDLK_DOWN: //remove the most recently created quad every time the down arrow key is pressed
removeQuad();
break;
default:
break;
}
break;
case SDL_QUIT:
done = true;
break;
default:
break;
}
}
//Clear the color buffer
glClear(GL_COLOR_BUFFER_BIT);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
//replace the current contents of the VBO with a completely new set of data (possibly including either more or fewer quads)
glBufferData(GL_ARRAY_BUFFER, vertices.size()*sizeof(Vertex), &vertices.front(), GL_DYNAMIC_DRAW);
glEnableClientState(GL_VERTEX_ARRAY);
//Set vertex data
glVertexPointer(2, GL_INT, sizeof(Vertex), 0);
//Draw the quads
glMultiDrawArrays(GL_TRIANGLE_STRIP, &startingElements.front(), &counts.front(), counts.size());
glDisableClientState(GL_VERTEX_ARRAY);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//Check to see if we need to delay the duration of the current frame to match the set framerate
currentTicks = SDL_GetTicks();
int currentDuration = (currentTicks - pastTicks); //the duration of the frame so far
if (currentDuration < frameDuration)
{
SDL_Delay(frameDuration - currentDuration);
}
pastTicks = SDL_GetTicks();
// flip the buffers
SDL_GL_SwapBuffers();
}
}
void cleanUp()
{
glDeleteBuffers(1, &VBO);
SDL_FreeSurface(screen);
SDL_Quit();
}
int main(int argc, char *argv[])
{
std::cout << "To create a quad, press the up arrow. To remove the most recently created quad, press the down arrow." << std::endl;
init();
gameLoop();
cleanUp();
return 0;
}
At the moment I'm using GL_TRIANGLE_STRIP with glMultiDrawArrays() to render my quads. This works, and seems to be pretty decent in terms of performance, but I have to wonder whether using GL_TRIANGLES in conjunction with an IBO to avoid duplicate vertices would be a more efficient way to render. I've done some research, and some people suggest that indexed GL_TRIANGLES generally outperform GL_TRIANGLE_STRIP, but they also seem to assume that the number of quads remains constant, and thus that the VBO and IBO would not have to be rebuffered each frame. That's my biggest hesitation with indexed GL_TRIANGLES: if I did implement it, I would have to rebuffer the entire index buffer each frame in addition to rebuffering the entire VBO, again because of the dynamic number of quads.
So basically, my question is this: Given that I have to rebuffer all of my vertex data to the GPU each frame due to the dynamic number of quads, would it be more efficient to switch to indexed GL_TRIANGLES to draw the quads, or should I stick with my current GL_TRIANGLE_STRIP implementation?
You'll probably be fine using un-indexed GL_QUADS/GL_TRIANGLES and a glDrawArrays() call.
SDL_Surface *screen;
...
screen = SDL_SetVideoMode(SCREEN_WIDTH, SCREEN_HEIGHT, 0, SDL_OPENGL);
...
SDL_FreeSurface(screen);
Don't do that:
The returned surface is freed by SDL_Quit and must not be freed by the caller. This rule also includes consecutive calls to SDL_SetVideoMode (i.e. resize or resolution change) because the existing surface will be released automatically.
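So cleanUp() should simply drop the SDL_FreeSurface() call, e.g.:
void cleanUp()
{
    glDeleteBuffers(1, &VBO);
    //the surface returned by SDL_SetVideoMode() is owned by SDL and
    //is released by SDL_Quit(); don't free it yourself
    SDL_Quit();
}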
EDIT: Simple vertex array demo:
// g++ main.cpp -lglut -lGL
#include <GL/glut.h>
#include <vector>
using namespace std;
// OpenGL Mathematics (GLM): http://glm.g-truc.net/
#include <glm/glm.hpp>
#include <glm/gtc/random.hpp>
using namespace glm;
struct SpriteWrangler
{
SpriteWrangler( unsigned int aSpriteCount )
{
verts.resize( aSpriteCount * 6 );
states.resize( aSpriteCount );
for( size_t i = 0; i < states.size(); ++i )
{
states[i].pos = linearRand( vec2( -400, -400 ), vec2( 400, 400 ) );
states[i].vel = linearRand( vec2( -30, -30 ), vec2( 30, 30 ) );
Vertex vert;
vert.r = (unsigned char)linearRand( 64.0f, 255.0f );
vert.g = (unsigned char)linearRand( 64.0f, 255.0f );
vert.b = (unsigned char)linearRand( 64.0f, 255.0f );
vert.a = 255;
verts[i*6 + 0] = verts[i*6 + 1] = verts[i*6 + 2] =
verts[i*6 + 3] = verts[i*6 + 4] = verts[i*6 + 5] = vert;
}
}
void wrap( const float minVal, float& val, const float maxVal )
{
if( val < minVal )
val = maxVal - fmod( maxVal - val, maxVal - minVal );
else
val = minVal + fmod( val - minVal, maxVal - minVal );
}
void Update( float dt )
{
for( size_t i = 0; i < states.size(); ++i )
{
states[i].pos += states[i].vel * dt;
wrap( -400.0f, states[i].pos.x, 400.0f );
wrap( -400.0f, states[i].pos.y, 400.0f );
float size = 20.0f;
verts[i*6 + 0].pos = states[i].pos + vec2( -size, -size );
verts[i*6 + 1].pos = states[i].pos + vec2( size, -size );
verts[i*6 + 2].pos = states[i].pos + vec2( size, size );
verts[i*6 + 3].pos = states[i].pos + vec2( size, size );
verts[i*6 + 4].pos = states[i].pos + vec2( -size, size );
verts[i*6 + 5].pos = states[i].pos + vec2( -size, -size );
}
}
struct Vertex
{
vec2 pos;
unsigned char r, g, b, a;
};
struct State
{
vec2 pos;
vec2 vel; // units per second
};
vector< Vertex > verts;
vector< State > states;
};
void display()
{
// timekeeping
static int prvTime = glutGet(GLUT_ELAPSED_TIME);
const int curTime = glutGet(GLUT_ELAPSED_TIME);
const float dt = ( curTime - prvTime ) / 1000.0f;
prvTime = curTime;
// sprite updates
static SpriteWrangler wrangler( 2000 );
wrangler.Update( dt );
vector< SpriteWrangler::Vertex >& verts = wrangler.verts;
glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
// set up projection and camera
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
double w = glutGet( GLUT_WINDOW_WIDTH );
double h = glutGet( GLUT_WINDOW_HEIGHT );
double ar = w / h;
glOrtho( -400 * ar, 400 * ar, -400, 400, -1, 1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnableClientState( GL_VERTEX_ARRAY );
glEnableClientState( GL_COLOR_ARRAY );
glVertexPointer( 2, GL_FLOAT, sizeof( SpriteWrangler::Vertex ), &verts[0].pos.x );
glColorPointer( 4, GL_UNSIGNED_BYTE, sizeof( SpriteWrangler::Vertex ), &verts[0].r );
glDrawArrays( GL_TRIANGLES, 0, verts.size() );
glDisableClientState( GL_VERTEX_ARRAY );
glDisableClientState( GL_COLOR_ARRAY );
glutSwapBuffers();
}
// run display() every 16ms or so
void timer( int extra )
{
glutTimerFunc( 16, timer, 0 );
glutPostRedisplay();
}
int main(int argc, char **argv)
{
glutInit( &argc, argv );
glutInitWindowSize( 600, 600 );
glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
glutCreateWindow( "Sprites" );
glutDisplayFunc( display );
glutTimerFunc( 0, timer, 0 );
glutMainLoop();
return 0;
}
You can get decent performance with just vertex arrays.
Ideally most/all of your dts should be <= 16 milliseconds.
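For comparison, here is roughly what the indexed-GL_TRIANGLES path described in the question would involve: rebuilding and re-uploading an index buffer alongside the vertices whenever the quad count changes. This is a sketch only; IBO is a hypothetical second buffer object created the same way as the question's VBO.
//6 indices per quad: two triangles sharing two of the four vertices.
//With createQuad()'s vertex order (top-left, bottom-left, top-right,
//bottom-right) the triangles are (0,1,2) and (2,1,3).
std::vector<GLuint> indices;
indices.reserve(counts.size() * 6);
for (size_t q = 0; q < counts.size(); ++q)
{
    GLuint base = static_cast<GLuint>(q * 4);
    GLuint quad[6] = { base, base+1, base+2, base+2, base+1, base+3 };
    indices.insert(indices.end(), quad, quad+6);
}
//both buffers get re-uploaded each frame, just like the VBO
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, IBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size()*sizeof(GLuint),
             &indices.front(), GL_DYNAMIC_DRAW);
glDrawElements(GL_TRIANGLES, (GLsizei)indices.size(), GL_UNSIGNED_INT, 0);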

GLUT: Wrong Mouse Coordinates?

Using SCREEN_WIDTH = 1200, and SCREEN_HEIGHT = 800.
First, I draw a box to the screen at (x = 0, y = 0, w = 40, h = 40);
Then I use the handleMouse Function to return the x and y coordinates of where the mouse is clicked.
The problem is that when the program starts in non-maximized windowed mode and I click on (what appears to be) the very bottom-right corner of the box, the coordinates returned are x = 40, y = 32, when I think they should be x = 40, y = 40.
I don't know whether the problem is that the box isn't being drawn right or that the function is returning the wrong x/y.
I believe I understand how OpenGL rendering, transformations, and glOrtho work, but I could be completely wrong. I have seen a few suggestions online saying that the window decorations (I'm using Windows 7) can cause this problem, but they did very little explaining and provided no solution.
This is my entire source code; I have stripped my game down to the basics and the problem still persists :(. I added two pictures so people can see the problem. In a non-maximized window (the top picture), clicking the bottom-right corner returns the coordinates 41, 32; the y coordinate is smaller than it should be. In a maximized window (the bottom picture), clicking the same corner returns the correct coordinates 40, 40. These results occur for both my original source code and genpfault's suggested code.
//Turns out I can't post Pictures :(, links instead.
non-Maximized Windowed!
Maximized Windowed!
main.cpp
int main( int argc, char* args[] )
{
//Initialize FreeGLUT
glutInit( &argc, args );
//Create OpenGL 2.1 context
glutInitContextVersion( 2, 1 );
//Create Double Buffered Window
glutInitDisplayMode( GLUT_DOUBLE );
glutInitWindowSize( SCREEN_WIDTH, SCREEN_HEIGHT );
glutCreateWindow( "OpenGL" );
//Do post window/context creation initialization
if( !initGL() )
{
printf( "Unable to initialize graphics library!\n" );
return 1;
}
//initGame();
glutMouseFunc(handleMouse);
glutDisplayFunc( render );
glutMainLoop();
return 0;
}
Functions.h
#ifndef FUNCTIONS_H
#define FUNCTIONS_H
bool initGL();
void render();
void handleMouse(int button, int state, int x, int y);
#endif
Functions.cpp
bool initGL()
{
//Initialize clear color
glClearColor( 0.f, 0.f, 0.f, 1.f );
//Check for error
GLenum error = glGetError();
if( error != GL_NO_ERROR )
{
//cout <<"Error initializing OpenGL! " << gluErrorString( error ) << endl;
return false;
}
return true;
}
void render()
{
//Clear color buffer
glClear( GL_COLOR_BUFFER_BIT );
glColor3f(1.f,1.f,1.f);
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
gluOrtho2D(0, SCREEN_WIDTH, SCREEN_HEIGHT, 0);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glBegin(GL_QUADS);
glVertex2f(0,0);
glVertex2f(0, 41);
glVertex2f(41, 41);
glVertex2f(41, 0);
glEnd();
glutSwapBuffers();
}
void handleMouse(int button, int state, int x, int y)
{
std::cout << x << '\t' << y << std::endl;
}
constants.h
const int SCREEN_WIDTH = 1200;
const int SCREEN_HEIGHT = 800;
This works fine for me on Windows 7 on Aero and Classic:
#include <GL/glut.h>
#include <iostream>
void render()
{
glClearColor( 0, 0, 0, 1 );
glClear( GL_COLOR_BUFFER_BIT );
glMatrixMode( GL_PROJECTION );
glLoadIdentity();
double w = glutGet( GLUT_WINDOW_WIDTH );
double h = glutGet( GLUT_WINDOW_HEIGHT );
glOrtho(0, w, h, 0, -1, 1);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glColor3ub( 255, 255, 255 );
glBegin(GL_QUADS);
glVertex2f(0,0);
glVertex2f(41, 0);
glVertex2f(41, 41);
glVertex2f(0, 41);
glEnd();
glutSwapBuffers();
}
void handleMouse(int button, int state, int x, int y)
{
std::cout << x << '\t' << y << std::endl;
}
int main( int argc, char* args[] )
{
glutInit( &argc, args );
glutInitDisplayMode( GLUT_DOUBLE );
glutInitWindowSize( 640, 480 );
glutCreateWindow( "OpenGL" );
glutMouseFunc(handleMouse);
glutDisplayFunc( render );
glutMainLoop();
return 0;
}
Most notably I added the runtime glutGet() window size queries to render().
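An equivalent approach is to keep the viewport and projection in sync through a reshape callback instead of querying the size every frame; a sketch (register it with glutReshapeFunc(reshape) in main()):
void reshape(int width, int height)
{
    //GLUT passes the client-area size here, so the projection always
    //matches the pixel coordinates reported by the mouse callback
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    gluOrtho2D(0, width, height, 0);
    glMatrixMode(GL_MODELVIEW);
    glLoadIdentity();
}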
Try moving the quad away from the edge of the display; when I tried something like that, the behavior at the edge of the screen was very odd:
glBegin(GL_QUADS);
glVertex2f(20,20);
glVertex2f(20, 61);
glVertex2f(61, 61);
glVertex2f(61, 20);
glEnd();