I am writing in C++ and using OpenGL in Windows.
I've created a cube and I want to rotate it around the y axis (using glRotatef(), not gluLookAt()) by pressing the '4' or '6' numpad keys.
The problem is that when I press either key, the cube rotates slightly, stops for a moment, and then starts rotating continuously.
Is there any way to achieve this using glutKeyboardFunc()?
If not, how can it be achieved?
That initial pause is the operating system's key-repeat delay. Instead of rotating directly in the keyboard callback, maintain your own key state map (updated from key-down/key-up callbacks), disable key repeat, and trigger redraws on a timer:
// g++ main.cpp -o main -lglut -lGL -lGLU && ./main
#include <GL/glut.h>
#include <map>
bool paused = false;
std::map< unsigned char, bool > state;
void keyboard_down( unsigned char key, int x, int y )
{
    if( key == 'p' )
        paused = !paused;
    state[ key ] = true;
}
void keyboard_up( unsigned char key, int x, int y )
{
    state[ key ] = false;
}
void display()
{
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );

    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    double w = glutGet( GLUT_WINDOW_WIDTH );
    double h = glutGet( GLUT_WINDOW_HEIGHT );
    double ar = w / h;
    glOrtho( -2 * ar, 2 * ar, -2, 2, -1, 1 );

    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();

    // advance the angle based on the current key state, not on key events
    static float angle = 0;
    if( state[ '6' ] ) angle -= 3;
    if( state[ '4' ] ) angle += 3;
    if( !paused )
        angle += 0.5;

    glPushMatrix();
    // this demo spins a quad around the z axis; for the cube in the question
    // use glRotatef( angle, 0, 1, 0 ) to rotate around the y axis instead
    glRotatef( angle, 0, 0, 1 );
    glColor3ub( 255, 0, 0 );
    glBegin( GL_QUADS );
    glVertex2i( -1, -1 );
    glVertex2i( 1, -1 );
    glVertex2i( 1, 1 );
    glVertex2i( -1, 1 );
    glEnd();
    glPopMatrix();

    glutSwapBuffers();
}
void timer( int extra )
{
    glutPostRedisplay();
    glutTimerFunc( 16, timer, 0 );
}
int main( int argc, char **argv )
{
    glutInit( &argc, argv );
    glutInitDisplayMode( GLUT_RGBA | GLUT_DEPTH | GLUT_DOUBLE );
    glutInitWindowSize( 640, 480 );
    glutCreateWindow( "Keyboard" );
    glutDisplayFunc( display );
    glutKeyboardFunc( keyboard_down );
    glutKeyboardUpFunc( keyboard_up );
    // redraw every ~16 ms regardless of keyboard activity
    glutTimerFunc( 0, timer, 0 );
    // report only real key-down/key-up transitions, not OS auto-repeat
    glutIgnoreKeyRepeat( GL_TRUE );
    glutMainLoop();
    return 0;
}
I have a game written using SDL2, and I use the SDL2 renderer (hardware accelerated) for drawing. Is there a trick to draw filled quads or triangles?
At the moment I'm filling them by just drawing lots of lines (SDL_RenderDrawLines), but the performance stinks.
I don't want to go into OpenGL.
SDL_RenderGeometry()/SDL_RenderGeometryRaw() were added in SDL 2.0.18:
Added SDL_RenderGeometry() and SDL_RenderGeometryRaw() to allow rendering of arbitrary shapes using the SDL 2D render API
Example:
// g++ main.cpp `pkg-config --cflags --libs sdl2`
#include <SDL.h>
#include <vector>
int main( int argc, char** argv )
{
    SDL_Init( SDL_INIT_EVERYTHING );
    SDL_Window* window = SDL_CreateWindow( "SDL", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 800, 600, SDL_WINDOW_SHOWN );
    SDL_Renderer* renderer = SDL_CreateRenderer( window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC );

    const std::vector< SDL_Vertex > verts =
    {
        { SDL_FPoint{ 400, 150 }, SDL_Color{ 255, 0, 0, 255 }, SDL_FPoint{ 0 }, },
        { SDL_FPoint{ 200, 450 }, SDL_Color{ 0, 0, 255, 255 }, SDL_FPoint{ 0 }, },
        { SDL_FPoint{ 600, 450 }, SDL_Color{ 0, 255, 0, 255 }, SDL_FPoint{ 0 }, },
    };

    bool running = true;
    while( running )
    {
        SDL_Event ev;
        while( SDL_PollEvent( &ev ) )
        {
            if( ( SDL_QUIT == ev.type ) ||
                ( SDL_KEYDOWN == ev.type && SDL_SCANCODE_ESCAPE == ev.key.keysym.scancode ) )
            {
                running = false;
                break;
            }
        }

        SDL_SetRenderDrawColor( renderer, 0, 0, 0, SDL_ALPHA_OPAQUE );
        SDL_RenderClear( renderer );
        SDL_RenderGeometry( renderer, nullptr, verts.data(), verts.size(), nullptr, 0 );
        SDL_RenderPresent( renderer );
    }

    SDL_DestroyRenderer( renderer );
    SDL_DestroyWindow( window );
    SDL_Quit();
    return 0;
}
Note that, because the API lacks a data channel for Z coordinates, only affine texturing is achievable (texture coordinates are interpolated in screen space rather than perspective-correctly).
Not possible. SDL2 does not include a full-fledged rendering engine.
Some options:
You could adopt Skia (the graphics library used in Chrome, among others) and then either stick with a software renderer, or instantiate an OpenGL context and use the hardware backend.
You could use another 2D drawing library such as Raylib (see the sketch after this list).
Or just bite the bullet and draw your triangles using OpenGL.
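For reference, here's a minimal sketch of the Raylib route. The window size, colors, and vertex positions are arbitrary choices for illustration (not taken from the question), and the exact link flags vary by platform:
// g++ main.cpp -o main -lraylib   (additional platform libraries may be required)
#include "raylib.h"

int main( void )
{
    InitWindow( 800, 600, "raylib filled triangle" );
    SetTargetFPS( 60 );

    while( !WindowShouldClose() )
    {
        BeginDrawing();
        ClearBackground( BLACK );
        // DrawTriangle() expects its vertices in counter-clockwise order
        DrawTriangle( Vector2{ 400, 150 }, Vector2{ 200, 450 }, Vector2{ 600, 450 }, RED );
        EndDrawing();
    }

    CloseWindow();
    return 0;
}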
I created an OpenGL window using SDL2 but the background keeps switching between black and yellow.
#include <SDL2/SDL.h>
#include <GL/glew.h>
#define SCREEN_WIDTH 500
#define SCREEN_HEIGHT 500
int main( int argc, char** argv )
{
    SDL_GL_SetAttribute( SDL_GL_CONTEXT_MAJOR_VERSION, 3 );
    SDL_GL_SetAttribute( SDL_GL_CONTEXT_MINOR_VERSION, 3 );
    SDL_GL_SetAttribute( SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE );
    SDL_GL_SetAttribute( SDL_GL_DOUBLEBUFFER, 1 );
    SDL_GL_SetAttribute( SDL_GL_ACCELERATED_VISUAL, 1 );

    SDL_Window* gWindow = SDL_CreateWindow(
        "Title",
        SDL_WINDOWPOS_UNDEFINED,
        SDL_WINDOWPOS_UNDEFINED,
        SCREEN_WIDTH,
        SCREEN_HEIGHT,
        SDL_WINDOW_OPENGL);
    SDL_GL_CreateContext( gWindow );

    glewExperimental = GL_TRUE;
    glewInit();

    glPointSize(3);
    glClearColor(1,1,0,0);
    glViewport(0, 0, SCREEN_WIDTH, SCREEN_HEIGHT);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SetSwapInterval(1);

    int quit=0;
    SDL_Event event;
    while( !quit )
    {
        while(SDL_PollEvent( &event ))
        {
            if( event.type == SDL_QUIT )
                quit = 1;
        }
        SDL_GL_SwapWindow( gWindow );
    }

    SDL_DestroyWindow(gWindow);
    return 0;
}
I expect the background to be yellow, as defined with glClearColor(1,1,0,0), without flickering while the program runs. Is there something wrong in the code?
The reason for the flickering is that you're using double buffering but only ever clear one of the two buffers with the yellow color (note that glClear is called just once in your code).
I suggest you call glClear every frame. To fix your code, move the call into the loop, just before SDL_GL_SwapWindow:
while( !quit )
{
    while(SDL_PollEvent( &event ))
    {
        if( event.type == SDL_QUIT )
            quit = 1;
    }

    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow( gWindow );
}
I want to render a simple cube using 'glMultiDrawElements' (even if there is just one object!), but the result is rendered incorrectly. However, if I use 'glDrawElements' everything works. To be clear, 'glewInit' has been called and initialized correctly.
Here's the code using 'glDrawElements':
[...]
std::vector<std::vector<int32_t>> indexArray = pRenderBatch->GetIndexAttribArray();
std::vector<int32_t> countArray = pRenderBatch->GetCountElementArray(indexArray);
// indexArray[0] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 0, 2, 19, 3, 5, 20, 21, 22, 10, 23, 11, 13, 24, 14, 25, 15, 17}
//Size(indexArray) = 1, Size(indexArray[0])=36
// countArray = {36} //Size(countArray)=1
glDrawElements(GL_TRIANGLES, countArray[0], GL_UNSIGNED_INT, (const GLvoid *)&indexArray[0][0]);
[...]
Now, here's the code using 'glMultiDrawElements':
[...]
std::vector<std::vector<int32_t>> indexArray = pRenderBatch->GetIndexAttribArray();
std::vector<int32_t> countArray = pRenderBatch->GetCountElementArray(indexArray);
glMultiDrawElements(GL_TRIANGLES, &countArray[0], GL_UNSIGNED_INT, (const GLvoid **)&indexArray[0], countArray.size());
[...]
The resulting display is incorrect (screenshot omitted).
However, according to the OpenGL documentation (http://www.opengl.org/sdk/docs/man3/xhtml/glMultiDrawElements.xml), 'glMultiDrawElements' has the following signature:
void glMultiDrawElements(GLenum mode, const GLsizei * count, GLenum type, const GLvoid ** indices, GLsizei primcount);
I don't understand why the display is not correct. Can anyone help me?
std::vector<std::vector<int32_t>> indexArray
glMultiDrawElements() expects a pointer to an array of index-array pointers. It doesn't know anything about the memory layout of std::vector<int32_t>, so reinterpreting &indexArray[0] as const GLvoid** does not give it the pointers it needs.
Try this instead:
std::vector< int32_t* > indexArray2
You can construct it like this:
std::vector< int32_t* > indexArray2( indexArray.size() );
for( size_t i = 0; i < indexArray.size(); ++i )
{
    indexArray2[i] = &indexArray[i][0];
}
Then pass (const GLvoid**)&indexArray2[0] as the indices argument.
EDIT: Complete example:
#include <GL/glew.h>
#include <GL/glut.h>
#include <vector>
using namespace std;
void display()
{
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );

    glMatrixMode( GL_PROJECTION );
    glLoadIdentity();
    glOrtho( -2, 2, -2, 2, -1, 1 );

    glMatrixMode( GL_MODELVIEW );
    glLoadIdentity();

    GLfloat verts[] =
    {
        0, 0,
        1, 0,
        1, 1,
        0, 0,
        -1, 0,
        -1, -1,
    };

    vector< GLsizei > counts;
    vector< vector< unsigned int > > indexes;
    {
        vector< unsigned int > index;
        index.push_back( 0 );
        index.push_back( 1 );
        index.push_back( 2 );
        counts.push_back( index.size() );
        indexes.push_back( index );
    }
    {
        vector< unsigned int > index;
        index.push_back( 3 );
        index.push_back( 4 );
        index.push_back( 5 );
        counts.push_back( index.size() );
        indexes.push_back( index );
    }

    // build the array of raw pointers that glMultiDrawElements() expects
    vector< unsigned int* > indexes2( indexes.size() );
    for( size_t i = 0; i < indexes.size(); ++i )
    {
        indexes2[i] = &indexes[i][0];
    }

    glEnableClientState( GL_VERTEX_ARRAY );
    glVertexPointer( 2, GL_FLOAT, 0, verts );
    glMultiDrawElements
    (
        GL_TRIANGLES,
        &counts[0],
        GL_UNSIGNED_INT,
        (const GLvoid **)&indexes2[0],
        counts.size()
    );

    glutSwapBuffers();
}
int main( int argc, char **argv )
{
    glutInit( &argc, argv );
    glutInitDisplayMode( GLUT_RGBA | GLUT_DOUBLE );
    glutInitWindowSize( 600, 600 );
    glutCreateWindow( "GLUT" );
    glewInit();
    glutDisplayFunc( display );
    glutMainLoop();
    return 0;
}
I'm an experienced programmer but totally new to C++. I'm using GLFW 3 and Horde3D in Xcode 5 on OS X 10.5.
I've followed the basic tutorials for GLFW and Horde3D. I'm able to create a window, make its context current, and a simple game loop appears to run fine, including h3dRender( cam ). But all I get is a black window. Any insight as to the step (or entire concept) I'm missing?
Thank you! (code below)
#include <iostream>
#include <GLFW/glfw3.h>
#include <Horde3D.h>
#include <Horde3DUtils.h>
GLFWwindow* window;
H3DNode model = 0, cam = 0;
int winWidth = 640, winHeight = 480;
float fps = 24;
static float t = 0;
bool running = false;
bool initWindow();
bool initGame();
void gameLoop();
void errorListener( int, const char* );
void windowCloseListener( GLFWwindow* );
int main(void)
{
    glfwSetErrorCallback( errorListener );

    if ( !initWindow() ) return -1;
    if ( !initGame() ) return -1;

    running = true;
    while ( running )
    {
        gameLoop();
    }

    h3dRelease();
    glfwDestroyWindow( window );
    glfwTerminate();
    exit( EXIT_SUCCESS );
}
bool initWindow()
{
    if ( !glfwInit() ) return -1;

    window = glfwCreateWindow( winWidth, winHeight, "Hello World", NULL, NULL );
    if ( !window )
    {
        glfwTerminate();
        exit( EXIT_FAILURE );
    }

    glfwSetWindowCloseCallback( window, windowCloseListener );
    glfwMakeContextCurrent( window );
    glfwSwapInterval( 0 );
    return true;
}
bool initGame()
{
    if ( !h3dInit() ) return false;

    H3DRes pipeRes = h3dAddResource( H3DResTypes::Pipeline, "standard.pipeline", 0 );
    H3DRes modelRes = h3dAddResource( H3DResTypes::SceneGraph, "character.scene.xml", 0 );
    H3DRes animRes = h3dAddResource( H3DResTypes::Animation, "walk.anim.xml", 0 );
    h3dutLoadResourcesFromDisk( "" );

    model = h3dAddNodes( H3DRootNode, modelRes );
    h3dSetupModelAnimStage( model, 0, animRes, 0, "", false );

    H3DNode light = h3dAddLightNode( H3DRootNode, "Light 1", 0, "LIGHTING", "SHADOWMAP" );
    h3dSetNodeTransform( light, 0, 20, 0, 0, 0, 0, 1, 1, 1 );
    h3dSetNodeParamF( light, H3DLight::RadiusF, 0, 50.0f );

    cam = h3dAddCameraNode( H3DRootNode, "Camera", pipeRes );
    h3dSetNodeParamI( cam, H3DCamera::ViewportXI, 0 );
    h3dSetNodeParamI( cam, H3DCamera::ViewportYI, 0 );
    h3dSetNodeParamI( cam, H3DCamera::ViewportWidthI, winWidth );
    h3dSetNodeParamI( cam, H3DCamera::ViewportHeightI, winHeight );
    h3dSetupCameraView( cam, 45.0f, ( float ) winWidth / winHeight, 0.5f, 2048.0f );
    h3dResizePipelineBuffers( pipeRes, winWidth, winHeight );

    return true;
}
void gameLoop ()
{
    t = t + 10.f * ( 1 / fps );

    h3dSetModelAnimParams( model, 0, t, 1.0f );
    h3dSetNodeTransform( model, t * 10, 0, 0, 0, 0, 0, 1, 1, 1 );

    h3dRender( cam );
    h3dFinalizeFrame();
    glfwSwapBuffers( window );
    glfwPollEvents();
}
void errorListener( int error, const char* description )
{
    fputs( description, stderr );
}
void windowCloseListener( GLFWwindow* window )
{
    running = false;
}
You are not able to see anything because the resources have not been loaded for your application. You need to pass the directory that holds the resources (character.scene.xml, walk.anim.xml, etc.) as the argument to h3dutLoadResourcesFromDisk().
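For example, here's a minimal sketch of the change in initGame(), assuming the sample content was copied into a "Content" directory next to the executable (the directory name is just an assumption; point it wherever your resources actually live):
// pass a real content directory instead of ""; multiple search paths
// can be separated with '|'
if ( !h3dutLoadResourcesFromDisk( "Content" ) )
    return false;  // at least one resource could not be found or loaded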
I've written some basic setup code for an OpenGL application:
#include <Windows.h>
#include <gl/gl.h>
#include <SDL.h>
int main()
{
    SDL_Init( SDL_INIT_VIDEO );
    SDL_Surface* surface = SDL_SetVideoMode( 800, 600, 32, SDL_HWSURFACE | SDL_DOUBLEBUF | SDL_OPENGL );
    glViewport( 0, 0, 800, 600 );

    SDL_Event windowEvent;
    while ( true )
    {
        if ( SDL_PollEvent( &windowEvent ) )
        {
            if ( windowEvent.type == SDL_QUIT ) break;
        }

        glClearColor( 1.0f, 0.0f, 0.0f, 1.0f );
        glClear( GL_COLOR_BUFFER_BIT );

        SDL_GL_SwapBuffers();
    }

    SDL_Quit();
    return 0;
}
Unfortunately this fails to link with the following error:
1>SDLmain.lib(SDL_win32_main.obj) : error LNK2001: unresolved external symbol _SDL_main
Linker settings:
http://puu.sh/kVae
Use main's full signature:
int main(int argc, char *argv[])
or
int main(int argc, char **argv)
or try implementing SDL_main instead of main (SDL.h #defines main to SDL_main, which is the symbol SDLmain.lib's entry point is looking for).
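For instance, a minimal entry point that links cleanly; this is only a sketch, and the window and event code from the question goes where the comment is:
// link against SDL.lib and SDLmain.lib as in the question's linker settings
#include <SDL.h>

// SDL.h #defines main to SDL_main, and the WinMain supplied by SDLmain.lib
// calls SDL_main( int, char** ) -- hence the unresolved _SDL_main when main
// is declared without arguments.
int main( int argc, char* argv[] )
{
    SDL_Init( SDL_INIT_VIDEO );
    // ... SDL_SetVideoMode, render loop, etc. from the question ...
    SDL_Quit();
    return 0;
}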