Created
August 10, 2019 14:53
-
-
Save jstine35/840177b1874b928a63350c40655611b7 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <chrono>

// Process-start timestamp, captured by a pre-main() dynamic initializer so
// that GetProcessTimeInSeconds()/GetProcessTimeInMsec() also work when called
// from other pre-main() initializers.
//
// The boolean/double juggle: virtually all operating systems zero-initialize
// statics before any dynamic initializer runs, so s_ProcessStartInit is
// reliably false until the comma expression in s_ProcessStartTimeSecs'
// initializer flips it to true at the exact moment the start timestamp is
// captured.  Code running before that point sees the flag false and gets 0
// instead of a delta against an uninitialized start time.
bool s_ProcessStartInit = false;
double s_ProcessStartTimeSecs = ((s_ProcessStartInit = true),
	std::chrono::duration_cast<std::chrono::duration<double>>(
		std::chrono::high_resolution_clock::now().time_since_epoch()).count());
double GetProcessTimeInSeconds() | |
{ | |
// Windows don't really have a concept of "process time" eg. individual processes cannot be suspended/resumed | |
// individually. Suspend/resume (aka, sleep) occurs on a system-wide scale. So it should be fine enough to | |
// return whatever std::chrono::high_resolution_clock queries. | |
// STL chrono is the most retarded thing ever. who the fuck cares about all this bullshit implicit conversion and | |
// billions of units of measuremenets and ratios and under-the-hood conversion rounding that will make everyone's | |
// life suck? Just convert to double, normalized to seconds, and then the world of time-keeping is easy. --jstine | |
if (!s_ProcessStartInit) return 0; | |
return std::chrono::duration_cast<std::chrono::duration<double>>(std::chrono::high_resolution_clock::now().time_since_epoch()).count() - s_ProcessStartTimeSecs; | |
} | |
double GetProcessTimeInMsec() | |
{ | |
// ... apparently because specifying std:milli as a template parameter is more readable and makes more sense than | |
// taking a double result and multiplying it by 0.001. (/sarcasm) | |
// | |
// Why is chrono so overly optimized for integer maths? I mean I know the original API was developed in the late | |
// 90's as part of BOOST when AMD still had a shit FPU unit and you needed to do all your maths as integers to | |
// avoid grotesque bottlenecks. But why is this part of the STL standard? It's so wierdly hyper-optimized for | |
// solving specific problems to specific (now out-dated) platforms. Even mobile has no trouble with double- | |
// precision FPUs. --jstine | |
if (!s_ProcessStartInit) return 0; | |
return std::chrono::duration_cast<std::chrono::duration<double, std::milli>>(std::chrono::high_resolution_clock::now().time_since_epoch()).count() - (s_ProcessStartTimeSecs*1000); | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment