Hello everyone,
I am working on my first Unity project, which is a simple racing game.
I have noticed that my time measurement is not very exact and varies from run to run — only by a few tens of milliseconds, but for a high-score list that can make a relevant difference.
My test track is the following:
-
A short straight track
-
“car” accelerates on its own, so no user interaction which might cause a delay
-
the time measurement starts after a 3 seconds count down (after the car starts the acceleration).
-
the time is stopped after hitting (collision) the finishing line (which is an obstacle in my case).
The measured time with this setup varies between 2.50 s and 2.80 s, so the spread of about 0.3 s is quite large.
With my limited knowledge of Unity, I assume it has something to do with how and especially when Unity calls the different methods — in my case, the time span between two Update() calls. Am I on the right track?
Anyhow, how can I get a precise time measurement?
Here is my stopwatch script:
// On-screen race stopwatch: accumulates frame delta times while running
// and renders the elapsed time into a UI Text label.
public class Stopwatch : MonoBehaviour
{
// Total elapsed time in seconds, accumulated from Time.deltaTime each frame.
float timer;
// Cached display components of `timer`, refreshed on every tick.
float msec;
float sec;
float min;
// HUD label that shows the formatted time (wired up in the Inspector).
[SerializeField] Text txt_stopWatch;
// Gate flag: Update only advances the timer while this is true.
bool doCalculation;
// Start is called before the first frame update
// Initializes the stopwatch in its paused state with zero elapsed time.
// Start is called before the first frame update.
void Start()
{
    timer = 0f;
    doCalculation = false;
}
// Update is called once per frame
// Advances the stopwatch once per frame while it is running.
// Update is called once per frame.
void Update()
{
    // Guard clause: skip the tick entirely while the stopwatch is paused.
    if (!doCalculation)
    {
        return;
    }

    StopWatchCalculation();
}
// Accumulates one frame's worth of time and refreshes the HUD label.
//
// NOTE(review): timing driven from Update is quantized to frame
// boundaries, so run-to-run jitter of roughly one frame (~16 ms at
// 60 fps, more under load) is expected — this is the likely source of
// the 2.50–2.80 s spread. For collision-accurate lap times, record
// Time.time (or Time.fixedTime in the physics callback) at the start
// and at the finish-line collision and subtract, instead of summing
// per-frame deltas.
void StopWatchCalculation()
{
    timer += Time.deltaTime;

    // Keep the cached display components in sync with the timer.
    // example: timer = 1.3375 | (int)timer = 1 | (timer - (int)timer) * 1000 = 337
    msec = (int)((timer - (int)timer) * 1000);
    sec = (int)(timer % 60);
    min = (int)(timer / 60 % 60);

    // Delegate formatting to the shared helper so the HUD renders the
    // time exactly like every other caller of GimmeTimeFormat, instead
    // of duplicating the format string here.
    txt_stopWatch.text = GimmeTimeFormat(timer);
}
// Formats an elapsed time in seconds as "mm:ss:fff" (minutes, seconds,
// milliseconds). Minutes wrap at 60; fractional seconds become the
// millisecond field (truncated, not rounded).
//
// Fix: the millisecond specifier was "{2:00}", which pads to only two
// digits — 5 ms rendered as "05" and was indistinguishable from 50 ms
// ("50"). Milliseconds span 0–999 and need three-digit padding ("000").
// Integer temporaries replace the float ones, since every value here is
// a whole number after the casts.
public static string GimmeTimeFormat(float time)
{
    int wholeSeconds = (int)time;
    int tmpMsec = (int)((time - wholeSeconds) * 1000);
    int tmpSec = wholeSeconds % 60;
    int tmpMin = wholeSeconds / 60 % 60;
    return string.Format("{0:00}:{1:00}:{2:000}", tmpMin, tmpSec, tmpMsec);
}
// Starts (or resumes) the stopwatch; Update begins accumulating on the next frame.
public void StartStopwatch() => doCalculation = true;
// Pauses the stopwatch; the accumulated time is kept and can be resumed.
public void StopStopwatch() => doCalculation = false;