Skip to content

Instantly share code, notes, and snippets.

@SnopyDogy
Last active December 16, 2015 04:59
Show Gist options
  • Star 1 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save SnopyDogy/5380968 to your computer and use it in GitHub Desktop.
Save SnopyDogy/5380968 to your computer and use it in GitHub Desktop.
A class for testing the performance of code in isolation. Useful for testing one algorithm against another in a controlled environment. Includes a main function showing example usage (it tests for-loop and for-each-loop performance).
////////////////////////////////////////////////////////////
/// @file main.cpp
/// @details A small sample program used to run performance tests.
/// @author Greg Nott
/// @version 1.0 - Initial version.
/// @date 14/04/13
////////////////////////////////////////////////////////////
#include <vector>
#include <list>
#include <stdio.h>
#include <math.h>
#include <stdlib.h>
#include <time.h>
#include <Windows.h>
#include "PerformanceTester.h"
// Shared state used by the benchmark functions below. The loops under test
// operate on globals rather than locals — presumably so the optimiser cannot
// trivially discard the measured work (TODO confirm intent).
std::vector<int> g_aTestVector;        // vector variant of the test container
std::vector<int>::iterator g_vEnd;     // cached end() for iterator-based vector tests (set by ResetVector)
std::vector<int>::iterator g_vItr;     // running iterator for vector tests (set by ResetVector)
std::list<int> g_lTestlist;            // list variant of the test container
std::list<int>::iterator g_End;        // cached end() for list tests (set by ResetList)
std::list<int>::iterator g_Itr;        // running iterator for list tests (set by ResetList)
int g_iTestValue = 0;                  // dummy sink written by the 'No Access' tests
#ifdef _DEBUG
int g_iTestItrations = 100000;         // smaller container/iteration count for debug builds
#else
int g_iTestItrations = 10000000;       // full-size run for release builds
#endif
// Reset callbacks: rewind the global iterators so a test can be re-run.
bool ResetVector();
bool ResetList();
// 'Set To Zero' benchmarks: write 0 into every element of the container.
bool For_vSetTo0_Sub();    // vector, indexed for loop, subscript access
bool For_vSetTo0_Itr();    // vector, for loop, iterator access
bool ForEach_vSetTo0();    // vector, for-each loop
bool For_lSetTo0();        // list, for loop, iterator access
bool ForEach_lSetTo0();    // list, for-each loop
// 'Set To Rand' benchmarks: write rand() into every element.
bool For_vSetToRand_Sub();
bool For_vSetToRand_Itr();
bool ForEach_vSetToRand();
bool For_lSetToRand();
bool ForEach_lSetToRand();
// 'No Access' benchmarks: iterate without touching the element
// (measures loop/iteration overhead only).
bool For_vNoAccess_Sub();
bool For_vNoAccess_Itr();
bool ForEach_vNoAccess();
bool For_lNoAccess();
bool ForEach_lNoAccess();
////////////////////////////////////////////////////////////
/// @brief Builds the test containers, runs every loop benchmark, then prints
///        a consolidated results report.
/// @return 0 always.
////////////////////////////////////////////////////////////
int main()
{
    /* initialize random seed: */
    srand ( time(NULL) );
    // init test list:
    for (int i = 0; i < g_iTestItrations; ++i)
    {
        g_lTestlist.push_back(0);
    }
    // init test Vector:
    g_aTestVector.reserve(g_iTestItrations);
    for (int i = 0; i < g_iTestItrations; ++i)
    {
        g_aTestVector.push_back(0);
    }
    printf("Press enter to Start.\n\n");
    getchar();
    // using this to print:
    // (this tester is constructed with a null test function and is never Run();
    // it exists only for its coloured Print() helper)
    PerformanceTester oTest1(nullptr, 1, true);
    // Setup Tests: each tester will run its benchmark function 1000 times.
    PerformanceTester oFor_vSetTo0_Sub(&For_vSetTo0_Sub, 1000, true);
    PerformanceTester oFor_vSetTo0_Itr(&For_vSetTo0_Itr, 1000, true);
    PerformanceTester oForEach_vSetTo0(&ForEach_vSetTo0, 1000, true);
    PerformanceTester oFor_lSetTo0(&For_lSetTo0, 1000, true);
    PerformanceTester oForEach_lSetTo0(&ForEach_lSetTo0, 1000, true);
    PerformanceTester oFor_vSetToRand_Sub(&For_vSetToRand_Sub, 1000, true);
    PerformanceTester oFor_vSetToRand_Itr(&For_vSetToRand_Itr, 1000, true);
    PerformanceTester oForEach_vSetToRand(&ForEach_vSetToRand, 1000, true);
    PerformanceTester oFor_lSetToRand(&For_lSetToRand, 1000, true);
    PerformanceTester oForEach_lSetToRand(&ForEach_lSetToRand, 1000, true);
    PerformanceTester oFor_vNoAccess_Sub(&For_vNoAccess_Sub, 1000, true);
    PerformanceTester oFor_vNoAccess_Itr(&For_vNoAccess_Itr, 1000, true);
    PerformanceTester oForEach_vNoAccess(&ForEach_vNoAccess, 1000, true);
    PerformanceTester oFor_lNoAccess(&For_lNoAccess, 1000, true);
    PerformanceTester oForEach_lNoAccess(&ForEach_lNoAccess, 1000, true);
    // Each test group follows the same pattern: reset the global iterators,
    // install the matching reset callback (so state is restored between the
    // 1000 timed runs), then execute the test.
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'Set To Zero' Test - Vector container Type" );
    oTest1.Print("============================================================================");
    ResetVector();
    oFor_vSetTo0_Sub.SetResetFunct(&ResetVector);
    oFor_vSetTo0_Sub.Run();
    ResetVector();
    oFor_vSetTo0_Itr.SetResetFunct(&ResetVector);
    oFor_vSetTo0_Itr.Run();
    ResetVector();
    oForEach_vSetTo0.SetResetFunct(&ResetVector);
    oForEach_vSetTo0.Run();
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'Set To Zero' Test - List container Type" );
    oTest1.Print("============================================================================");
    ResetList();
    oFor_lSetTo0.SetResetFunct(&ResetList);
    oFor_lSetTo0.Run();
    ResetList();
    oForEach_lSetTo0.SetResetFunct(&ResetList);
    oForEach_lSetTo0.Run();
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'Set To Rand' Test - Vector container Type" );
    oTest1.Print("============================================================================");
    ResetVector();
    oFor_vSetToRand_Sub.SetResetFunct(&ResetVector);
    oFor_vSetToRand_Sub.Run();
    ResetVector();
    oFor_vSetToRand_Itr.SetResetFunct(&ResetVector);
    oFor_vSetToRand_Itr.Run();
    ResetVector();
    oForEach_vSetToRand.SetResetFunct(&ResetVector);
    oForEach_vSetToRand.Run();
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'Set To Rand' Test - List container Type" );
    oTest1.Print("============================================================================");
    ResetList();
    oFor_lSetToRand.SetResetFunct(&ResetList);
    oFor_lSetToRand.Run();
    ResetList();
    oForEach_lSetToRand.SetResetFunct(&ResetList);
    oForEach_lSetToRand.Run();
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'No Access' Test - Vector container Type" );
    oTest1.Print("============================================================================");
    ResetVector();
    oFor_vNoAccess_Sub.SetResetFunct(&ResetVector);
    oFor_vNoAccess_Sub.Run();
    ResetVector();
    oFor_vNoAccess_Itr.SetResetFunct(&ResetVector);
    oFor_vNoAccess_Itr.Run();
    ResetVector();
    oForEach_vNoAccess.SetResetFunct(&ResetVector);
    oForEach_vNoAccess.Run();
    oTest1.Print("============================================================================");
    oTest1.Print("Preforming 'No Access' Test - List container Type" );
    oTest1.Print("============================================================================");
    ResetList();
    oFor_lNoAccess.SetResetFunct(&ResetList);
    oFor_lNoAccess.Run();
    ResetList();
    oForEach_lNoAccess.SetResetFunct(&ResetList);
    oForEach_lNoAccess.Run();
    // Final report: re-print each tester's aggregated statistics in order.
    oTest1.Print("============================================================================");
    oTest1.Print("Test Results:" );
    oTest1.Print("============================================================================");
    oTest1.Print("For Loop 'Set To Zero', Vector Container, Subscript Access Result: ");
    oFor_vSetTo0_Sub.PrintResults();
    oTest1.Print("For Loop 'Set To Zero', Vector Container, Iterator Access Result: ");
    oFor_vSetTo0_Itr.PrintResults();
    oTest1.Print("For Each Loop 'Set To Zero', Vector Container: ");
    oForEach_vSetTo0.PrintResults();
    oTest1.Print("For Loop 'Set To Zero', List Container Result: ");
    oFor_lSetTo0.PrintResults();
    oTest1.Print("For Each Loop 'Set To Zero', List Container: ");
    oForEach_lSetTo0.PrintResults();
    oTest1.Print("For Loop 'Set To Rand', Vector Container, Subscript Access Result: ");
    oFor_vSetToRand_Sub.PrintResults();
    oTest1.Print("For Loop 'Set To Rand', Vector Container, Iterator Access Result: ");
    oFor_vSetToRand_Itr.PrintResults();
    oTest1.Print("For Each Loop 'Set To Rand', Vector Container: ");
    oForEach_vSetToRand.PrintResults();
    oTest1.Print("For Loop 'Set To Rand', List Container Result: ");
    oFor_lSetToRand.PrintResults();
    oTest1.Print("For Each Loop 'Set To Rand', List Container: ");
    oForEach_lSetToRand.PrintResults();
    oTest1.Print("For Loop 'No Access', Vector Container, Subscript Access Result: ");
    oFor_vNoAccess_Sub.PrintResults();
    oTest1.Print("For Loop 'No Access', Vector Container, Iterator Access Result: ");
    oFor_vNoAccess_Itr.PrintResults();
    oTest1.Print("For Each Loop 'No Access', Vector Container: ");
    oForEach_vNoAccess.PrintResults();
    oTest1.Print("For Loop 'No Access', List Container Result: ");
    oFor_lNoAccess.PrintResults();
    oTest1.Print("For Each Loop 'No Access', List Container: ");
    oForEach_lNoAccess.PrintResults();
    oTest1.Print("Press enter to close");
    getchar();
    return 0;
}
/// Benchmark: zero every vector element via an indexed for loop with
/// subscript access. (The loop body itself is the code under test, so it is
/// deliberately left untouched.) Assumes g_iTestItrations equals the vector
/// size, which main() guarantees.
bool For_vSetTo0_Sub()
{
    for (int i = 0; i < g_iTestItrations; ++i)
    {
        g_aTestVector[i] = 0;
    }
    return true;
}
/// Benchmark: zero every vector element via iterator access. Relies on
/// ResetVector() having positioned g_vItr/g_vEnd before each run (main()
/// calls it once up front and installs it as the tester's reset callback).
/// '<' comparison is valid here because vector iterators are random-access.
bool For_vSetTo0_Itr()
{
    for (; g_vItr < g_vEnd; ++g_vItr)
    {
        *g_vItr = 0;
    }
    return true;
}
bool ForEach_vSetTo0()
{
for each (auto Int in g_aTestVector)
{
Int = 0;
}
return true;
}
/// Benchmark: zero every list element via iterator access. Relies on
/// ResetList() having positioned g_Itr/g_End before each run. '!=' is used
/// because list iterators are bidirectional, not random-access.
bool For_lSetTo0()
{
    for(; g_Itr != g_End; ++g_Itr)
    {
        *g_Itr = 0;
    }
    return true;
}
bool ForEach_lSetTo0()
{
for each (auto Itr in g_lTestlist)
{
Itr = 0;
}
return true;
}
/// Benchmark: fill the vector with rand() via an indexed for loop with
/// subscript access. The rand() call dominates per-iteration cost, making
/// the loop-overhead difference between variants proportionally smaller.
bool For_vSetToRand_Sub()
{
    for (int i = 0; i < g_iTestItrations; ++i)
    {
        g_aTestVector[i] = rand();
    }
    return true;
}
/// Benchmark: fill the vector with rand() via iterator access. Relies on
/// ResetVector() having positioned g_vItr/g_vEnd before each run.
bool For_vSetToRand_Itr()
{
    for (; g_vItr < g_vEnd; ++g_vItr)
    {
        *g_vItr = rand();
    }
    return true;
}
bool ForEach_vSetToRand()
{
for each (auto Int in g_aTestVector)
{
Int = rand();
}
return true;
}
/// Benchmark: fill the list with rand() via iterator access. Relies on
/// ResetList() having positioned g_Itr/g_End before each run.
bool For_lSetToRand()
{
    for(; g_Itr != g_End; ++g_Itr)
    {
        *g_Itr = rand();
    }
    return true;
}
bool ForEach_lSetToRand()
{
for each (auto Itr in g_lTestlist)
{
Itr = rand();
}
return true;
}
/// Benchmark: iterate the same count as the vector tests but never touch an
/// element — writes only the global dummy sink, isolating pure loop overhead.
bool For_vNoAccess_Sub()
{
    for (int i = 0; i < g_iTestItrations; ++i)
    {
        g_iTestValue = 0;
    }
    return true;
}
/// Benchmark: walk the vector with iterators but never dereference them —
/// measures iterator-advance overhead only. Relies on ResetVector() having
/// positioned g_vItr/g_vEnd before each run.
bool For_vNoAccess_Itr()
{
    for (; g_vItr < g_vEnd; ++g_vItr)
    {
        g_iTestValue = 0;
    }
    return true;
}
bool ForEach_vNoAccess()
{
for each (auto Int in g_aTestVector)
{
g_iTestValue = 0;
}
return true;
}
/// Benchmark: walk the list with iterators but never dereference them.
/// NOTE(review): unlike the other iterator tests, this one resets its own
/// iterators at the top (duplicating ResetList, which main() also installs
/// as the reset callback) — inconsistent with its siblings but harmless.
bool For_lNoAccess()
{
    g_End = g_lTestlist.end();
    g_Itr = g_lTestlist.begin();
    for(; g_Itr != g_End; ++g_Itr)
    {
        g_iTestValue = 0;
    }
    return true;
}
bool ForEach_lNoAccess()
{
for each (auto Itr in g_lTestlist)
{
g_iTestValue = 0;
}
return true;
}
/// Rewinds the global vector iterators to the container's current bounds so
/// an iterator-based vector test can run again. Installed as the tester's
/// reset callback for all vector tests.
/// @return Always true (the reset cannot fail).
bool ResetVector()
{
    g_vItr = g_aTestVector.begin();
    g_vEnd = g_aTestVector.end();
    return true;
}
/// Rewinds the global list iterators to the container's current bounds so an
/// iterator-based list test can run again. Installed as the tester's reset
/// callback for all list tests.
/// @return Always true (the reset cannot fail).
bool ResetList()
{
    g_Itr = g_lTestlist.begin();
    g_End = g_lTestlist.end();
    return true;
}
////////////////////////////////////////////////////////////
/// @file PerformanceTester.cpp
/// @author Greg Nott
/// @version 1.0 - Initial Version
/// @date 22/3/13
////////////////////////////////////////////////////////////
#include "PerformanceTester.h"
#include <stdio.h>
// Shared console handle, fetched once at static-init time; used by every
// tester instance for coloured output.
HANDLE PerformanceTester::m_hstdout = GetStdHandle( STD_OUTPUT_HANDLE );
////////////////////////////////////////////////////////////
/// @brief Prepares a test run and calibrates the QueryPerformanceCounter overhead.
/// @param a_fpCodeToTest Function containing the code to benchmark (may be
///                       nullptr when the object is used only for Print()).
/// @param a_uiNoOfTests  Number of times Run() should execute the test code.
/// @param a_bVerbose     When true, per-run progress and results are printed.
////////////////////////////////////////////////////////////
PerformanceTester::PerformanceTester(TestCode a_fpCodeToTest, unsigned int a_uiNoOfTests, bool a_bVerbose)
: m_fpCodeToTest(a_fpCodeToTest)
, m_uiNoOfTests(a_uiNoOfTests)
, m_bVerbose(a_bVerbose)
{
    m_poTestTimes = new TestTimes[m_uiNoOfTests];
    m_dAverageElapsedTime = 0;
    m_dMaxElapsedTime = 0;
    m_dMinElapsedTime = 0;
    m_dAverageElapsedCycles = 0;
    m_dMaxElapsedCycles = 0;
    m_dMinElapsedCycles = 0;
    m_fElapsedTimeAccuracy = 0;
    m_fElapsedCyclesAccuracy = 0;
    m_eTimeResolution = MILISECONDS;
    m_dOverhead = 0.0;
    m_fpResetTestCode = nullptr;
    // Query the counter frequency; QueryPerformanceFrequency returns zero on
    // failure, which puts the tester into its fail state.
    m_bFailure = !(QueryPerformanceFrequency(&(m_iFrequency)));
    if (m_bFailure)
    {
        Print("Error Occured: Could not get processor Frequency.", FAIL);
    }
    // Calibrate the overhead of the QPC API itself so Run() can subtract it
    // from every measurement. (Named constant replaces the magic '100' that
    // appeared twice; the unused 'result' local is gone.)
    const int kOverheadSamples = 100;
    LARGE_INTEGER start;
    LARGE_INTEGER end;
    // Call through a function pointer — presumably so the call cost matches
    // how Run() invokes the real test code (TODO confirm intent).
    TestCode Test = OverheadTest;
    for (int i = 0; i < kOverheadSamples; ++i)
    {
        QueryPerformanceCounter(&start);
        Test();  // return value intentionally ignored; OverheadTest always succeeds
        QueryPerformanceCounter(&end);
        m_dOverhead += double(end.QuadPart - start.QuadPart);
    }
    m_dOverhead /= double(kOverheadSamples); // use an average for accuracy!
}
/// Releases the per-run timing records allocated in the constructor.
PerformanceTester::~PerformanceTester()
{
    delete[] m_poTestTimes;
}
////////////////////////////////////////////////////////////
/// @brief Executes the test code m_uiNoOfTests times, timing each run with
///        QueryPerformanceCounter, then computes min/max/average statistics
///        and prints the results.
/// @return The failure flag: true if anything went wrong, false on success.
/// @details Fixes relative to the original:
///  - 'm_bFailure &= QueryPerformanceCounter(...)' could never SET the flag
///    once it was false, so QPC failures were silently ignored; failures are
///    now OR-ed in.
///  - 'return (m_bFailure &= !bResult)' likewise returned false (no failure)
///    when the test code failed; the flag is now set explicitly.
///  - a_uiNoOfTests == 0 is guarded (the statistics pass read
///    m_poTestTimes[0] and divided by zero).
///  - statistics accumulators are reset so a second Run() does not compound
///    the first run's totals; snprintf replaces sprintf; %u matches the
///    unsigned arguments.
////////////////////////////////////////////////////////////
bool PerformanceTester::Run()
{
    char caBuffer[256];
    bool bResult = true;
    // A failure recorded earlier (e.g. QueryPerformanceFrequency failing in
    // the constructor) would make any timing meaningless — refuse to run.
    if (m_bFailure)
    {
        Print("Error Occured: Could not Run the Performance Test.", FAIL);
        return m_bFailure;
    }
    // Verify that the test-code function pointer is valid.
    if (m_fpCodeToTest == nullptr)
    {
        Print("Error Occured: No Test Code Specified.", FAIL);
        m_bFailure = true; // record the failure so PrintResults() reports it too
        return m_bFailure;
    }
    // Nothing to do — also protects the statistics pass below, which reads
    // m_poTestTimes[0] and divides by m_uiNoOfTests.
    if (m_uiNoOfTests == 0)
    {
        return m_bFailure;
    }
    unsigned int Itr = 0;
    for (Itr = 0; Itr < m_uiNoOfTests; ++Itr)
    {
        // QPC returns zero on failure; OR failures into the flag (the old
        // '&=' could only ever clear it).
        m_bFailure |= (QueryPerformanceCounter(&(m_poTestTimes[Itr].m_Start)) == 0);
        // Call the test code:
        bResult = m_fpCodeToTest();
        m_bFailure |= (QueryPerformanceCounter(&(m_poTestTimes[Itr].m_End)) == 0);
        if (!bResult)
        {
            Print("Error Occured: Test code did not exacute correctly.", FAIL);
            m_bFailure = true;
            return m_bFailure;
        }
        if (m_bFailure)
        {
            Print("Error Occured: Could not Get Performance Counters, Aborting further testing!", FAIL);
            return m_bFailure;
        }
        // Elapsed QPC ticks minus the calibrated call overhead, then converted
        // to the configured time resolution (ticks / (ticks-per-second / unit)).
        m_poTestTimes[Itr].m_dElapsedCycles = double(m_poTestTimes[Itr].m_End.QuadPart - m_poTestTimes[Itr].m_Start.QuadPart) - m_dOverhead;
        m_poTestTimes[Itr].m_dElapsedTime = m_poTestTimes[Itr].m_dElapsedCycles / (double(m_iFrequency.QuadPart) / double(m_eTimeResolution));
        // Print the per-run result.
        snprintf(caBuffer, sizeof(caBuffer), "Test Run %u Completed, Number of CPU Cycles = %u, Time Elapsed = %f", Itr, (unsigned int)(m_poTestTimes[Itr].m_dElapsedCycles), m_poTestTimes[Itr].m_dElapsedTime);
        Print(caBuffer);
        // Reset the test state for the next run, if a reset callback was set.
        if (m_fpResetTestCode != nullptr)
        {
            bResult = m_fpResetTestCode();
            if (!bResult)
            {
                Print("Error Occured: Reset Function did not exacute correctly.", FAIL);
                m_bFailure = true;
                return m_bFailure;
            }
        }
    }
    // Compute min/max/average statistics over the completed runs. Seed both
    // extremes from run 0 and zero the accumulators so a repeated Run() does
    // not compound earlier totals.
    m_dAverageElapsedTime = 0;
    m_dAverageElapsedCycles = 0;
    m_dMaxElapsedTime = m_poTestTimes[0].m_dElapsedTime;
    m_dMinElapsedTime = m_poTestTimes[0].m_dElapsedTime;
    m_dMaxElapsedCycles = m_poTestTimes[0].m_dElapsedCycles;
    m_dMinElapsedCycles = m_poTestTimes[0].m_dElapsedCycles;
    for (Itr = 0; Itr < m_uiNoOfTests; ++Itr)
    {
        m_dAverageElapsedTime += m_poTestTimes[Itr].m_dElapsedTime;
        if (m_poTestTimes[Itr].m_dElapsedTime > m_dMaxElapsedTime)
        {
            m_dMaxElapsedTime = m_poTestTimes[Itr].m_dElapsedTime;
        }
        if (m_poTestTimes[Itr].m_dElapsedTime < m_dMinElapsedTime)
        {
            m_dMinElapsedTime = m_poTestTimes[Itr].m_dElapsedTime;
        }
        m_dAverageElapsedCycles += m_poTestTimes[Itr].m_dElapsedCycles;
        if (m_poTestTimes[Itr].m_dElapsedCycles > m_dMaxElapsedCycles)
        {
            m_dMaxElapsedCycles = m_poTestTimes[Itr].m_dElapsedCycles;
        }
        if (m_poTestTimes[Itr].m_dElapsedCycles < m_dMinElapsedCycles)
        {
            m_dMinElapsedCycles = m_poTestTimes[Itr].m_dElapsedCycles;
        }
    }
    m_dAverageElapsedTime /= double(m_uiNoOfTests);
    m_dAverageElapsedCycles /= double(m_uiNoOfTests);
    // Variance figure: (max - min) spread as a percentage of the average.
    if (m_dAverageElapsedTime != 0)
    {
        m_fElapsedTimeAccuracy = float( (m_dMaxElapsedTime - m_dMinElapsedTime) / m_dAverageElapsedTime * 100.0 ); // Percentage!
    }
    if (m_dAverageElapsedCycles != 0)
    {
        m_fElapsedCyclesAccuracy = float( (m_dMaxElapsedCycles - m_dMinElapsedCycles) / m_dAverageElapsedCycles * 100.0 ); // Percentage!
    }
    PrintResults();
    return m_bFailure;
}
/// Prints a line of text in the given console colour — but only when the
/// tester was constructed in verbose mode.
/// @param a_pcText  NUL-terminated text to print (a newline is appended).
/// @param a_eColour Console attribute to apply (default NORMAL, per the header).
void PerformanceTester::Print(char* a_pcText, Colour a_eColour)
{
    // Guard clause: verbose mode off means no output at all.
    if (!m_bVerbose)
    {
        return;
    }
    SetConsoleTextAttribute( m_hstdout, a_eColour );
    printf("%s\n", a_pcText);
}
void PerformanceTester::PrintResults()
{
if (m_bFailure)
{
SetConsoleTextAttribute( m_hstdout, FAIL );
printf("UNKNOWN ERROR OCCURED: no test results found\n");
}
else
{
SetConsoleTextAttribute( m_hstdout, PASS );
printf("All Tests Completed\n");
switch (m_eTimeResolution)
{
case SECONDS:
printf("All Times in Seconds\n");
break;
case MICROSECONDS:
printf("All Times in Microseconds\n");
break;
default:
printf("All Times in Miliseconds\n");
break;
}
printf("Note: the following measurments have been adjusted for the QPC overhead of %i CPU cycles.\n", (int)m_dOverhead);
printf("Average Elapsed Time: %f\n", m_dAverageElapsedTime);
printf("Min/Max Elapsed Time: %f/%f\n", m_dMinElapsedCycles / (m_iFrequency.QuadPart / (int)m_eTimeResolution), m_dMaxElapsedCycles / (m_iFrequency.QuadPart / (int)m_eTimeResolution));
printf("Varience %f%%\n", m_fElapsedTimeAccuracy);
printf("Average Elapsed CPU Cycles: %i\n", (unsigned int)(m_dAverageElapsedCycles));
printf("Min/Max Elapsed CPU Cycles: %i/%i\n", (unsigned int)(m_dMinElapsedCycles), (unsigned int)(m_dMaxElapsedCycles));
printf("Varience %f%%\n\n", m_fElapsedCyclesAccuracy);
}
}
/// Empty test function: timing a call to this through a TestCode pointer
/// measures the framework's own per-call overhead (QPC calls + indirect
/// call), which the constructor averages into m_dOverhead.
bool PerformanceTester::OverheadTest()
{
    return true;
}
////////////////////////////////////////////////////////////
/// @file PerformanceTester.h
/// @details A class used to performance-test code. The code must be wrapped in a function with the following prototype:
/// \code
/// bool FunctionName();
/// \endcode
/// @author Greg Nott
/// @version 1.0 - Initial Version
/// @date 22/3/13
////////////////////////////////////////////////////////////
#ifndef _PERFORMANCETESTER_H_
#define _PERFORMANCETESTER_H_
#include <Windows.h>
typedef bool (*TestCode)(); ///< A typedef defining a function Pointer. Used to point to the code to Test.
/// A performance-testing harness: runs a TestCode function a configured
/// number of times, timing each run with QueryPerformanceCounter, and
/// reports min/max/average elapsed time and CPU cycles.
class PerformanceTester
{
public:
    /// Windows console text-attribute values used for coloured output.
    enum Colour
    {
        PASS = 0x000A,    ///< green — success messages.
        FAIL = 0x000C,    ///< red — error messages.
        NORMAL = 0x000F   ///< bright white — ordinary output.
    };
    /// Unit for reported times, expressed as units-per-second.
    enum Resolution
    {
        SECONDS = 1,
        MILISECONDS = 1000,
        MICROSECONDS = 1000000,
    };
    PerformanceTester(TestCode a_fpCodeToTest, unsigned int a_uiNoOfTests, bool a_bVerbose);
    ~PerformanceTester();
    double GetAverageElapsedTime() const { return m_dAverageElapsedTime; }
    double GetMaxElapsedTime() const { return m_dMaxElapsedTime; }
    double GetMinElapsedTime() const { return m_dMinElapsedTime; }
    double GetAverageElapsedCycles() const { return m_dAverageElapsedCycles; }
    double GetMaxElapsedCycles() const { return m_dMaxElapsedCycles; }
    double GetMinElapsedCycles() const { return m_dMinElapsedCycles; }
    Resolution GetTimeResolution() const { return m_eTimeResolution; }
    void SetTimeResolution(Resolution a_eTimeResolution) { m_eTimeResolution = a_eTimeResolution;}
    unsigned int GetNoOfTests() const { return m_uiNoOfTests; }
    // NOTE(review): changing the count after construction does not resize
    // m_poTestTimes (allocated in the constructor) — verify before raising it.
    void SetNoOfTests(unsigned int a_uiNoOfTests) { m_uiNoOfTests = a_uiNoOfTests; }
    void SetResetFunct(TestCode a_fpResetCode) { m_fpResetTestCode = a_fpResetCode; }
    void SetTestFunct(TestCode a_fpTestCode) { m_fpCodeToTest = a_fpTestCode; }
    bool Run();
    void Print(char* a_pcText, Colour a_eColour = NORMAL);
    void PrintResults(); ///< Prints the results of the last test run.
protected:
private:
    struct TestTimes ///< This struct contains the results of a single performance test run.
    {
        LARGE_INTEGER m_Start;    ///< The result of QueryPerformanceCounter() at the start of the test.
        LARGE_INTEGER m_End;      ///< The result of QueryPerformanceCounter() at the end of the test.
        double m_dElapsedCycles;  ///< The number of elapsed QPC ticks (overhead-adjusted).
        double m_dElapsedTime;    ///< The elapsed time for the test, calculated from m_Start and m_End.
    };
    Resolution m_eTimeResolution;     ///< The resolution to display the resulting times in.
    TestCode m_fpCodeToTest;          ///< A pointer to a function containing the code to be tested!
    TestCode m_fpResetTestCode;       ///< A pointer to a function used to reset the test code to an initial state!
    TestTimes* m_poTestTimes;         ///< Pointer to the per-run results of the tests.
    unsigned int m_uiNoOfTests;       ///< The number of times the test should be run.
    double m_dAverageElapsedTime;     ///< The average elapsed time for each test; this should be considered the final result.
    double m_dMaxElapsedTime;         ///< The maximum elapsed time for the test run.
    double m_dMinElapsedTime;         ///< The minimum elapsed time for the test run.
    double m_dAverageElapsedCycles;   ///< The average elapsed CPU cycles for each test; this should be considered the final result.
    double m_dMaxElapsedCycles;       ///< The maximum elapsed CPU cycles for the test run.
    double m_dMinElapsedCycles;       ///< The minimum elapsed CPU cycles for the test run.
    float m_fElapsedTimeAccuracy;     ///< The accuracy of the average time elapsed, expressed as a percentage.
    float m_fElapsedCyclesAccuracy;   ///< The accuracy of the average CPU cycles elapsed, expressed as a percentage.
    bool m_bFailure;                  ///< If true then a failure has occurred.
    bool m_bVerbose;                  ///< Will print out the status of the tests, including the current run number and results.
    LARGE_INTEGER m_iFrequency;       ///< The current performance frequency (QPC ticks per second).
    double m_dOverhead;               ///< The overhead involved in calling QPC; subtracted from results to ensure accurate measurements.
    static HANDLE m_hstdout;          ///< Console output handle.
    static bool OverheadTest();       ///< Used to get a more accurate read on the framework's testing overhead.
};
#endif // _PERFORMANCETESTER_H_
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment