Adjusting server-side tickrate dynamically
- by Stuart Blackler
I know nothing of game development/this site, so I apologise if this is completely foobar.
Today I experimented with building a small game loop for a networked game (think MW3, CS:GO, etc.), and I was wondering why those games don't build in automatic tickrate adjustment based on server performance. Would it affect the client that much if each frame told the client which tickrate it was simulated at? Has anyone attempted this before?
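To make the question concrete, here is a rough sketch of what I mean by the client "knowing" the tickrate: the server stamps each snapshot with the tick interval it was simulated at, and the client scales its interpolation by that instead of assuming a fixed rate. (SnapshotHeader and InterpolationAlpha are names I made up for illustration; this isn't from any real engine.)

#include <cstdint>

/* Hypothetical per-snapshot header: the server stamps each state update with
 * the tick interval (in ms) it was simulated at. */
struct SnapshotHeader
{
    uint32_t tickNumber;     // server tick this snapshot was produced on
    uint16_t tickIntervalMs; // 1000 / tickrate at the time of simulation
};

/* Client side: blend between two snapshots using the interval the server
 * actually reported rather than a compiled-in tickrate. */
float InterpolationAlpha(uint64_t renderTimeMs, uint64_t snapshotTimeMs,
                         const SnapshotHeader& header)
{
    return (renderTimeMs - snapshotTimeMs) / (float)header.tickIntervalMs;
}

My thinking is the client would only need that one extra field per snapshot to keep interpolating correctly, but I may be missing something.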
Here is what my noobish C++ brain came up with earlier. It raises the tickrate once it has been stable for x consecutive ticks; if a tick "lags" (overruns its time budget), the tickrate is reduced by y:
// GameEngine.cpp : Defines the entry point for the console application.
//
#include "stdafx.h" // MSVC precompiled header; must be the first include (use an empty stdafx.h on other platforms, or remove this line)
#ifdef _WIN32
#include <Windows.h>
#else
#include <sys/time.h>
#include <ctime>
#include <unistd.h> // usleep()
#endif
#include <cstdint> // uint64_t
#include <iostream>
using namespace std;
uint64_t GetTimeInMs()
{
#ifdef _WIN32
/* Windows */
FILETIME ft;
LARGE_INTEGER li;
/* Get the number of 100-nanosecond intervals elapsed since January 1, 1601 (UTC) and copy it
* to a LARGE_INTEGER structure. */
GetSystemTimeAsFileTime(&ft);
li.LowPart = ft.dwLowDateTime;
li.HighPart = ft.dwHighDateTime;
uint64_t ret = li.QuadPart;
ret -= 116444736000000000LL; /* Convert from Windows file time to UNIX epoch time. */
ret /= 10000; /* From 100-nanosecond (10^-7) to 1-millisecond (10^-3) intervals */
return ret;
#else
/* Linux */
struct timeval tv;
gettimeofday(&tv, NULL);
uint64_t ret = tv.tv_usec;
/* Convert from microseconds (10^-6) to milliseconds (10^-3) */
ret /= 1000;
/* Add the seconds (10^0) after converting them to milliseconds (10^-3) */
ret += (tv.tv_sec * 1000);
return ret;
#endif
}
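/* Sleep() is Windows-only, so the game loop below needs a matching portable
* wrapper; a minimal sketch, assuming usleep() (which takes microseconds)
* is available on the non-Windows side: */
void SleepMs(uint64_t ms)
{
#ifdef _WIN32
Sleep((DWORD)ms);
#else
usleep(ms * 1000);
#endif
}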
int main(int argc, char* argv[])
{
int sv_tickrate_max = 1000; // The maximum amount of ticks per second
int sv_tickrate_min = 100; // The minimum amount of ticks per second
int sv_tickrate_adjust = 10; // How much to de/increment the tickrate by
int sv_tickrate_stable_before_increment = 1000; // How many stable ticks before we increase the tickrate again
int sys_tickrate_current = sv_tickrate_max; // Always start at the highest possible tickrate for the best performance
int counter_stable_ticks = 0; // How many ticks we have not lagged for
uint64_t totalStartTime = GetTimeInMs();
int ticks = 100000; // how many ticks to run for this test
while(ticks > 0)
{
int maxTimeInMs = 1000 / sys_tickrate_current; // per-tick time budget (integer ms)
uint64_t tickStartTime = GetTimeInMs();
// Long code here...
cout << ".";
uint64_t tickTimeTaken = GetTimeInMs() - tickStartTime;
if(tickTimeTaken < (uint64_t)maxTimeInMs)
{
// the tick finished early: sleep off the rest of its time budget
SleepMs(maxTimeInMs - tickTimeTaken);
counter_stable_ticks++;
if(counter_stable_ticks >= sv_tickrate_stable_before_increment)
{
// reset the stable # ticks counter
counter_stable_ticks = 0;
// make sure that we don't go over the maximum tickrate
if(sys_tickrate_current + sv_tickrate_adjust <= sv_tickrate_max)
{
sys_tickrate_current += sv_tickrate_adjust;
// let me know in console #DEBUG
cout << endl << "Improving tickrate. New tickrate: " << sys_tickrate_current << endl;
}
}
}
else if(tickTimeTaken > (uint64_t)maxTimeInMs)
{
cout << endl;
// a lagged tick breaks the stable streak, so start counting again
counter_stable_ticks = 0;
if((sys_tickrate_current - sv_tickrate_adjust) > sv_tickrate_min)
{
sys_tickrate_current -= sv_tickrate_adjust;
}
else if(sys_tickrate_current == sv_tickrate_min)
{
cout << "Already at the minimum tickrate. Please reduce sv_tickrate_min..." << endl;
}
else
{
// clamp to the minimum rather than stepping below it
sys_tickrate_current = sv_tickrate_min;
}
// let me know in console #DEBUG
cout << "The server has lag. Tickrate is now: " << sys_tickrate_current << endl;
}
ticks--;
}
uint64_t totalTimeTaken = GetTimeInMs() - totalStartTime;
cout << endl << endl << "Total time in ms: " << totalTimeTaken;
cout << endl << "Ending tickrate: " << sys_tickrate_current;
// wait for input so the console window stays open
char test;
cin >> test;
return 0;
}