How to capture an accurate framerate in OpenGL - C++

What is a good way to measure an accurate framerate (frames per second) in native Windows OpenGL C++?

Here's a timer class I used to use back in the day, in an ATL project. I haven't done C++ or OpenGL for a while, but maybe this will give you some ideas:
Usage
// Put this in your class somewhere
CTimer m_timer;
// Initialize the timer using
m_timer.Init();
// Call this every time you draw your scene
m_timer.Update();
// Call this to get the frames/sec
m_timer.GetFPS();
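A sketch of how those calls might sit in a render loop (hypothetical; DrawScene() and the done flag stand in for your own drawing code and exit condition):
// Hypothetical render loop
m_timer.Init( );
while ( !done )
{
    DrawScene( );                    // render one frame
    m_timer.Update( );               // advance the FPS bookkeeping
    float fps = m_timer.GetFPS( );   // current frames/sec, e.g. for an on-screen counter
}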
Timer Class
// Timer.h: Timer class used for determining elapsed time and
// frames per second.
//
//////////////////////////////////////////////////////////////////////
#ifndef _E_TIMER_H
#define _E_TIMER_H
#pragma once
//////////////////////////////////////////////////////////////////////
// INCLUDES
//////////////////////////////////////////////////////////////////////
#include <windows.h>
#include <mmsystem.h>                 // timeGetTime
#include <stdio.h>
#include <math.h>
#pragma comment(lib, "winmm.lib")     // multimedia timer
//////////////////////////////////////////////////////////////////////
// CLASSES
//////////////////////////////////////////////////////////////////////
class CTimer
{
private:
    //performance timer variables
    __int64 m_i64PerformanceTimerStart;
    __int64 m_i64PerformanceTimerElapsed;

    //multimedia timer variables
    unsigned long m_ulMMTimerElapsed;
    unsigned long m_ulMMTimerStart;

    //general timer variables
    __int64 m_i64Frequency;
    float   m_fResolution;
    bool    m_bPerformanceTimer;

    //FPS variables
    float m_fTime1;
    float m_fTime2;
    float m_fDiffTime;
    float m_fFPS;
    int   m_iFramesElapsed;

public:
    //----------------------------------------------------------
    // Name: CTimer::CTimer
    // Desc: Default constructor
    // Args: None
    // Rets: None
    //----------------------------------------------------------
    CTimer( void )
        : m_fFPS(0.0f), m_fTime1(0.0f), m_fTime2(0.0f),
          m_fDiffTime(0.0f), m_iFramesElapsed(0)
    { }

    //----------------------------------------------------------
    // Name: CTimer::~CTimer
    // Desc: Destructor
    // Args: None
    // Rets: None
    //----------------------------------------------------------
    virtual ~CTimer( void )
    { }

    //----------------------------------------------------------
    // Name: CTimer::Init - public
    // Desc: Initialize the timer for the program
    // Args: None
    // Rets: bool: -true:  using performance timer
    //             -false: using multimedia timer
    //----------------------------------------------------------
    bool Init( void )
    {
        //check to see if we are going to be using the performance counter
        if( QueryPerformanceFrequency( ( LARGE_INTEGER* )&m_i64Frequency ) )
        {
            //we are able to use the performance timer
            m_bPerformanceTimer= true;

            //get the current time and store it in m_i64PerformanceTimerStart
            QueryPerformanceCounter( ( LARGE_INTEGER* )&m_i64PerformanceTimerStart );

            //calculate the timer resolution (seconds per tick)
            m_fResolution= ( float )( 1.0/( double )m_i64Frequency );

            //initialize the elapsed time variable
            m_i64PerformanceTimerElapsed= m_i64PerformanceTimerStart;
        }
        //we cannot use the performance counter, so we'll use the multimedia counter
        else
        {
            //we're using the multimedia counter
            m_bPerformanceTimer= false;
            m_ulMMTimerStart   = timeGetTime( );   //record the time the program started
            m_ulMMTimerElapsed = m_ulMMTimerStart; //initialize the elapsed time variable
            m_fResolution      = 1.0f/1000.0f;
            m_i64Frequency     = 1000;
        }

        return m_bPerformanceTimer;
    }

    //----------------------------------------------------------
    // Name: CTimer::Update - public
    // Desc: Update the timer (perform FPS counter calculations)
    // Args: None
    // Rets: None
    //----------------------------------------------------------
    void Update( void )
    {
        //increase the number of frames that have passed
        m_iFramesElapsed++;

        if ( m_iFramesElapsed % 5 == 1 )
            m_fTime1 = GetTime( )/1000;
        else if ( m_iFramesElapsed % 5 == 0 )
        {
            m_fTime1   = m_fTime2;
            m_fTime2   = GetTime( )/1000;
            m_fDiffTime= ( float )fabs( m_fTime2-m_fTime1 );
        }

        //guard against division by zero during the first few frames
        if ( m_fDiffTime > 0.0f )
            m_fFPS= 5/( m_fDiffTime );

        /*m_fTime2 = GetTime( )/1000;
        m_fDiffTime= ( float )fabs( m_fTime2-m_fTime1 );

        if (m_fDiffTime > 1.0f)
        {
            m_fTime1 = m_fTime2;
            m_fFPS= m_iFramesElapsed / ( m_fDiffTime );
            m_iFramesElapsed = 0;
        }
        */
    }

    //----------------------------------------------------------
    // Name: CTimer::GetTime - public
    // Desc: Get the current time since the program started
    // Args: None
    // Rets: float: The time (in milliseconds) elapsed since the
    //       program started.
    //----------------------------------------------------------
    float GetTime( void )
    {
        __int64 i64Time;

        //check to see if we are using the performance counter
        if( m_bPerformanceTimer )
        {
            //get the current performance time
            QueryPerformanceCounter( ( LARGE_INTEGER* )&i64Time );

            //return the time since the program started
            return ( ( float )( i64Time - m_i64PerformanceTimerStart )*m_fResolution )*1000.0f;
        }
        //we are using the multimedia counter
        else
        {
            //return the time since the program started
            return ( ( float )( timeGetTime( ) - m_ulMMTimerStart )*m_fResolution )*1000.0f;
        }
    }

    //----------------------------------------------------------
    // Name: CTimer::GetElapsedSeconds - public
    // Desc: Get the seconds elapsed over the most recent
    //       five-frame sample (the value used for the FPS
    //       calculation).
    // Args: elapsedFrames: unused, kept for interface compatibility
    // Rets: float: The seconds elapsed over the last sample.
    //----------------------------------------------------------
    float GetElapsedSeconds(unsigned long elapsedFrames = 1)
    { return m_fDiffTime; }

    //----------------------------------------------------------
    // Name: CTimer::GetFPS - public
    // Desc: Get the current number of frames per second
    // Args: None
    // Rets: float: the number of frames per second
    //----------------------------------------------------------
    inline float GetFPS( void )
    { return m_fFPS; }
};
#endif // _E_TIMER_H
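For what it's worth, if you can use C++11 or later, the same idea needs far less machinery with std::chrono. A minimal, portable sketch (an illustration of the technique, not part of the class above):
#include <chrono>

class FpsCounter
{
    std::chrono::steady_clock::time_point m_last = std::chrono::steady_clock::now();
    int   m_frames = 0;
    float m_fps    = 0.0f;
public:
    // Call once per rendered frame.
    void Update( )
    {
        ++m_frames;
        auto now = std::chrono::steady_clock::now();
        std::chrono::duration<float> elapsed = now - m_last;
        if ( elapsed.count() >= 1.0f )   // refresh the reading once per second
        {
            m_fps    = m_frames / elapsed.count();
            m_frames = 0;
            m_last   = now;
        }
    }
    float GetFPS( ) const { return m_fps; }
};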

In C++, my favorite timer is the same one Steve suggests.
There may also be the issue of disabling vsync in your OpenGL app; for me it has always been on by default, and you have to load an extension function to disable it.
As for a maybe more platform-independent solution, use time.h.
I can't remember the function :( but it returns how long your app has been running in seconds; in that case, just count the number of frames that have passed between seconds and that's your fps (hypothetical function GetTime()):
// in your loop:
//////////
static int lastTime   = GetTime();
static int framesDone = 0;

int currentTime = GetTime();
if(currentTime > lastTime)
{
    int fps = framesDone;   // your frames/sec for the last second
    framesDone = 0;
    lastTime = currentTime;
}
framesDone++;
/////////
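If it helps, here is the same idea as a concrete sketch using std::time from <ctime> (this may or may not be the function I was thinking of; it has one-second resolution, so the reading updates once per second):
#include <ctime>
#include <cstdio>

// body of your render loop:
static std::time_t lastTime   = std::time(nullptr);
static int         framesDone = 0;

std::time_t currentTime = std::time(nullptr);
if ( currentTime > lastTime )
{
    std::printf( "fps: %d\n", framesDone );   // frames counted over the last second
    framesDone = 0;
    lastTime   = currentTime;
}
framesDone++;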
But yeah, for Windows the first answer is the best.
If you need help disabling vsync, let us know.
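For reference, on Windows vsync is typically toggled through the WGL_EXT_swap_control extension. A hedged sketch (this requires a current OpenGL context, and real code should check that the extension is actually supported):
#include <windows.h>
#include <GL/gl.h>

typedef BOOL ( WINAPI *PFNWGLSWAPINTERVALEXTPROC )( int interval );

void disableVSync( )
{
    // Load the extension function at runtime; it is only available
    // once an OpenGL rendering context is current.
    PFNWGLSWAPINTERVALEXTPROC wglSwapIntervalEXT =
        ( PFNWGLSWAPINTERVALEXTPROC )wglGetProcAddress( "wglSwapIntervalEXT" );
    if ( wglSwapIntervalEXT )
        wglSwapIntervalEXT( 0 );   // 0 = vsync off, 1 = vsync on
}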

Related

C++ How to correctly cap FPS (Using GLFW)

So I have been trying to limit my fps to 60:
//Those are members inside the Display class
double tracker = glfwGetTime();
const float frameCap = 1 / 60.0f;

void Display::present() {
    glfwSwapBuffers( _window );

    //Getting the time between each call
    double now = glfwGetTime( );
    double frameTime = now - tracker;
    tracker = now;

    //delaying if required
    if ( frameTime < frameCap )
        delay( frameCap - frameTime );
}

void game() {
    //Creating window and opengl context
    .....

    //Disabling "vsync" so I can cap fps on my own
    glfwSwapInterval(0);

    while(running) {
        //Rendering and updating
        .........

        //Swap buffers and delay if required
        display.present();
    }
}
My delay/sleep function
#ifdef _WIN32
#include <Windows.h>
#else
#include <unistd.h>
#endif
#include <cstdint>   // uint32_t

void delay( uint32_t ms )
{
#ifdef _WIN32
    Sleep( ms );
#else
    usleep( ms * 1000 );
#endif
}
Basically, the idea is to cap the framerate in each Display::present() call.
It looks like nothing is being capped at all; in fact, the fps is 4000+.
For the first call of present, your double frameTime = glfwGetTime( ) - tracker; sets frameTime to the difference between the current time (glfwGetTime()) and the initial value of tracker you set with glfwGetTime().
In the next line, you set tracker = frameTime; (frameTime is not a point in time but a difference here).
For the next call of present, the value of tracker is really small (as it is the difference and not the time), so your double frameTime = glfwGetTime( ) - tracker; becomes really large (larger than frameCap), and the sleep won't happen.
For the call after that, the condition ( frameTime < frameCap ) might be true again, but for the follow-up call it definitely won't be anymore.
So for at least every second invocation of present, the delay won't be called.
Besides that, glfwGetTime returns seconds, and frameCap also represents how many seconds a frame should last, but your void delay( uint32_t ms ) expects milliseconds; the fractional second you pass truncates to 0 ms, so effectively no delay happens at all.
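Putting both fixes together, a sketch of what present() could look like (same members as in the question; the delay argument is converted to milliseconds, and tracker is re-sampled after the delay so the sleep time is not billed to the next frame):
void Display::present() {
    glfwSwapBuffers( _window );

    double now = glfwGetTime( );        // seconds since GLFW was initialized
    double frameTime = now - tracker;   // seconds this frame actually took

    if ( frameTime < frameCap )
        delay( (uint32_t)( ( frameCap - frameTime ) * 1000.0 ) );   // seconds -> milliseconds

    tracker = glfwGetTime( );           // re-sample after the delay
}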

time based movement sliding object

At the moment I have a function that moves my object based on FPS; if the frames have not passed, it won't do anything.
It works fine if the computer can run it at that speed.
How would I make it time-based instead, and move the object based on elapsed time?
Here is my code:
typedef unsigned __int64 u64;

auto toolbarGL::Slide() -> void
{
    LARGE_INTEGER li = {};
    QueryPerformanceFrequency(&li);
    u64 freq = static_cast<u64>(li.QuadPart);   // clock ticks per second
    u64 period = 60;                            // fps
    u64 delay = freq / period;                  // clock ticks between frame paints
    u64 start = 0, now = 0;

    QueryPerformanceCounter(&li);
    start = static_cast<u64>(li.QuadPart);

    while (true)
    {
        // Waits to be ready to slide
        // Keeps looping till stopped then starts to wait again
        SlideEvent.wait();

        QueryPerformanceCounter(&li);
        now = static_cast<u64>(li.QuadPart);
        if (now - start >= delay)
        {
            if (slideDir == SlideFlag::Right)
            {
                if (this->x < 0)
                {
                    this->x += 5;
                    this->controller->Paint();
                }
                else
                    SlideEvent.stop();
            }
            else if (slideDir == SlideFlag::Left)
            {
                if (this->x > -90)
                {
                    this->x -= 5;
                    this->controller->Paint();
                }
                else
                    SlideEvent.stop();
            }
            else
                SlideEvent.stop();
            start = now;
        }
    }
}
You can update your objects by time difference. You need a start timestamp and then compute the difference on each iteration of the global loop, so the global loop is very important: it has to run all the time. My example just calls an update method for your objects. All your objects should depend on time, not FPS. FPS varies between computers, and even the same computer can show different FPS because of other processes running in the background.
#include <iostream>
#include <chrono>
#include <cstdint>   // uint32_t
#include <unistd.h>  // usleep

//Function to update all objects
void Update( float dt )
{
    //For example
    //for( auto Object : VectorObjects )
    //{
    //    Object->Update(dt);
    //}
}

int main()
{
    typedef std::chrono::duration<float> FloatSeconds;

    auto OldMs = std::chrono::system_clock::now().time_since_epoch();
    const uint32_t SleepMicroseconds = 100;

    //Global loop
    while (true)
    {
        auto CurMs = std::chrono::system_clock::now().time_since_epoch();
        auto DeltaMs = CurMs - OldMs;
        OldMs = CurMs;

        //Cast delta time to float seconds
        auto DeltaFloat = std::chrono::duration_cast<FloatSeconds>(DeltaMs);
        std::cout << "Seconds passed since last update: " << DeltaFloat.count() << " seconds" << std::endl;

        //Update all objects with the time delta as a float value
        Update( DeltaFloat.count() );

        // Sleep to give time for system interaction
        usleep(SleepMicroseconds);

        // Any other actions to calculate can be here
        //...
    }

    return 0;
}
For this example, the console output looks something like this:
Seconds passed since last update: 0.002685 seconds
Seconds passed since last update: 0.002711 seconds
Seconds passed since last update: 0.002619 seconds
Seconds passed since last update: 0.00253 seconds
Seconds passed since last update: 0.002509 seconds
Seconds passed since last update: 0.002757 seconds
Your time-based logic seems to be incorrect; here's a sample code snippet. The speed of the object should be the same irrespective of the speed of the system. Instead of QueryPerformanceFrequency, which is platform-dependent, use std::chrono.
#include <chrono>
#include <thread>

void animate(bool& stop)
{
    static float speed = 1080/5; // = 1080px / 5sec = 5 sec to cross the screen
    static std::chrono::system_clock::time_point start = std::chrono::system_clock::now();
    float fps;
    int object_x = 1080;

    while(!stop)
    {
        //calculate fractional time
        auto now = std::chrono::system_clock::now();
        auto diff = now - start;
        auto lapse_milli = std::chrono::duration_cast<std::chrono::milliseconds>(diff);
        float lapse_sec = lapse_milli.count()/1000.0f; // divide as float; integer division would truncate to 0

        //apply to object
        int incr_x = speed * lapse_sec;
        object_x -= incr_x;
        if( object_x < 0 ) object_x = 1080;

        // render object here

        fps = lapse_milli.count() > 0 ? 1000.0f/lapse_milli.count() : 0.0f; // frames per second from the frame time
        //print fps

        std::this_thread::sleep_for(std::chrono::milliseconds(100)); // change to achieve a desired fps rate
        start = now;
    }
}

issue regarding variable scope

I have trouble grasping the concept of variable scope: what is acceptable and what is not? I am aware that I've left out all of the graphics-related code, and that I have an infinite game loop, but bear with me:
#include "LList.h"
#include "Snake.h"
#undef main
int main()
{
float dt; // time since last update.
int start_time;
bool paused = false;
float originalTime = 1.0f;
float timer = originalTime;
Snake p1Snake(10, false);
// Start the 'stopwatch'
start_time = SDL_GetTicks();
///////////////////////
// The 'game loop' //
///////////////////////
while (!done)
{
//////////////////////
// Update variables //
//////////////////////
// Update the dt value (to be the time since the last update)
dt = (SDL_GetTicks() - start_time) / 1000.0f;
start_time = SDL_GetTicks();
//increment the movement timer
timer-=dt;
if(timer<=0) When timer hits zero the snake is moved north.
{
p1Snake.goNorth();
timer = originalTimer; //reset timer.
}
}
return 0;
}
Okay! So my question is about the variable 'originalTimer'. It is out of scope where the timer is reset, so what can I do differently? Sorry if this is an extremely elementary question.
You used two different names: originalTime and originalTimer.
#include "LList.h"
#include "Snake.h"
#undef main
int main()
{
float dt; // time since last update.
int start_time;
bool paused = false;
float originalTimer = 1.0f; //Changed to originalTimer
float timer = originalTimer; //Changed to originalTimer
Snake p1Snake(10, false);
// Start the 'stopwatch'
start_time = SDL_GetTicks();
///////////////////////
// The 'game loop' //
///////////////////////
while (!done)
{
//////////////////////
// Update variables //
//////////////////////
// Update the dt value (to be the time since the last update)
dt = (SDL_GetTicks() - start_time) / 1000.0f;
start_time = SDL_GetTicks();
//increment the movement timer
timer-=dt;
if(timer<=0) //When timer hits zero the snake is moved north.
{
p1Snake.goNorth();
timer = originalTimer; //reset timer.
}
}
return 0;
}
It may be a typo, but there are two different variables: originalTime and originalTimer.
Changing the line below should work for you:
timer = originalTime; //reset timer.

Limiting Update Rate in C++. Why does this code update once a second not 60 times a second?

I am making a small game with C++ and OpenGL. update() is normally called once every pass through the main loop. I am trying to limit this to 60 times per second (I want the game to update at the same speed on different-speed computers).
The code included below runs a timer and should call update() once the timer is >= 0.0166666666666667 seconds (60 times per second). However, the statement if((seconds - lastTime) >= 0.0166666666666667) seems only to be tripped once per second. Does anyone know why?
Thanks in advance for your help.
//Global Timer variables
double secondsS;
double lastTime;
time_t timer;
struct tm y2k;
double seconds;

void init()
{
    glClearColor(0,0,0,0.0); // Sets the clear colour to black.
    // glClear(GL_COLOR_BUFFER_BIT) in the display function

    //Init viewport
    viewportX = 0;
    viewportY = 0;

    initShips();

    //Time
    lastTime = 0;
    time_t timerS;
    struct tm y2k;
    y2k.tm_hour = 0; y2k.tm_min = 0; y2k.tm_sec = 0;
    y2k.tm_year = 100; y2k.tm_mon = 0; y2k.tm_mday = 1;
    time(&timerS); /* get current time; same as: timer = time(NULL) */
    secondsS = difftime(timerS, mktime(&y2k));
    printf ("%.f seconds since January 1, 2000 in the current timezone \n", secondsS);

    loadTextures();
    ShowCursor(true);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
}

void timeKeeper()
{
    y2k.tm_hour = 0; y2k.tm_min = 0; y2k.tm_sec = 0;
    y2k.tm_year = 100; y2k.tm_mon = 0; y2k.tm_mday = 1;
    time(&timer); /* get current time; same as: timer = time(NULL) */
    seconds = difftime(timer, mktime(&y2k));
    seconds -= secondsS;

    //Run 60 times a second. This limits updates to a constant standard.
    if((seconds - lastTime) >= 0.0166666666666667)
    {
        lastTime = seconds;
        update();
        //printf ("%.f seconds since beginning program \n", seconds);
    }
}
timeKeeper() is called in int WINAPI WinMain, while the program is !done
EDIT:
Thanks to those who helped; you pointed me in the right direction. As mentioned in the answer below, <ctime> does not have millisecond accuracy. I have therefore implemented the following code, which has the needed accuracy:
double GetSystemTimeSample()
{
    FILETIME ft1, ft2;
    // assume little endian and that ULONGLONG has same alignment as FILETIME
    ULONGLONG &t1 = *reinterpret_cast<ULONGLONG*>(&ft1),
              &t2 = *reinterpret_cast<ULONGLONG*>(&ft2);

    GetSystemTimeAsFileTime(&ft1);
    do
    {
        GetSystemTimeAsFileTime(&ft2);
    } while (t1 == t2);

    return (t2 - t1) / 10000.0;
}//GetSystemTimeSample

void timeKeeper()
{
    thisTime += GetSystemTimeSample();
    cout << thisTime << endl;

    //Run 60 times a second. This limits updates to a constant standard.
    if(thisTime >= 16.666666666666699825) //Compare to a value in milliseconds
    {
        thisTime -= 16.666666666666699825; //Carry the remainder instead of resetting
        update();
    }
}
http://www.cplusplus.com/reference/ctime/difftime/
"Calculates the difference in seconds between beginning and end."
So you get a value in whole seconds: time_t itself only has one-second resolution, so even though difftime returns a double, the result only changes in one-second steps.
That means (seconds - lastTime) only reaches your threshold once at least a full second has passed, which is why update() fires once per second.
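As a sanity check against the <ctime> resolution problem, here is a hedged sketch of the same limiter built on std::chrono::steady_clock (C++11), which is monotonic and has far better than millisecond resolution on typical systems; update() is assumed to be the function from the question:
#include <chrono>

void timeKeeper()
{
    using clock = std::chrono::steady_clock;
    static clock::time_point last = clock::now();
    static double accumulator = 0.0;
    const double step = 1.0 / 60.0;   // seconds per update

    clock::time_point now = clock::now();
    accumulator += std::chrono::duration<double>( now - last ).count();
    last = now;

    while ( accumulator >= step )     // run several updates if we fell behind
    {
        update();
        accumulator -= step;
    }
}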

Design fps limiter

I'm trying to cap the animation at 30 fps, so I designed the functions below to achieve that goal. Unfortunately, when I set 60 fps, the animation doesn't run as fast as it does with no condition check in setFPSLimit() at all (DirectX caps game applications at 60 fps by default). How should I fix it to make it work?
getGameTime() counts time like a stopwatch, in seconds, from when the game application starts.
//Called every time you need the current game time
float getGameTime()
{
    UINT64 ticks;
    float time;

    // This is the number of clock ticks since start
    if( !QueryPerformanceCounter((LARGE_INTEGER *)&ticks) )
        ticks = (UINT64)timeGetTime();

    // Divide by frequency to get the time in seconds
    time = (float)(__int64)ticks/(float)(__int64)ticksPerSecond;

    // Subtract the time at game start to get
    // the time since the game started
    time -= timeAtGameStart;

    return time;
}
With fps limit
http://www.youtube.com/watch?v=i3VDOMqI6ic
void update()
{
    if ( setFPSLimit(60) )
        updateAnimation();
}
With no fps limit
http://www.youtube.com/watch?v=Rg_iKk78ews

void update()
{
    updateAnimation();
}
bool setFPSLimit(float fpsLimit)
{
    // Convert fps to time
    static float timeDelay = 1 / fpsLimit;

    // Measure time elapsed
    static float timeElapsed = 0;
    float currentTime = getGameTime();
    static float totalTimeDelay = timeDelay + getGameTime();

    if( currentTime > totalTimeDelay )
    {
        totalTimeDelay = timeDelay + getGameTime();
        return true;
    }
    else
        return false;
}