Skip to content

Instantly share code, notes, and snippets.

@oliverde8
Last active August 31, 2020 12:30
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save oliverde8/3abcf5ffb3397454fefee1d9f3ff3b1e to your computer and use it in GitHub Desktop.
Save oliverde8/3abcf5ffb3397454fefee1d9f3ff3b1e to your computer and use it in GitHub Desktop.
<?php
/**
 * Pagination benchmark: seeds a MySQL table with test rows, then (below)
 * measures keyset ("optimized") vs. OFFSET-based pagination performance,
 * writing per-page timings to CSV files.
 *
 * @author De Cramer Oliver <oliverde8@gmail.com>
 */
// ################
// CONFIG
// ################
$nbDataToInsert = 100000; // Number of lines of data to insert to make the test.
$pageSize = 10;           // The size of a single page.
$stopAt = 1000;           // Stop when a page takes over this amount of ms to load.
$writeAtEach = 100;       // Sample one page out of every N into the CSV output.
// ################
// Initialize the data
// ################
$connection = new PDO("mysql:host=db;dbname=test", 'root', 'root');
// Fail loudly on SQL errors instead of exec()/query() silently returning false.
$connection->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
echo "Step 1: Initialize the test data.\n";
$query = 'DROP TABLE IF EXISTS test';
$connection->exec($query);
$query = <<<SQL
CREATE TABLE test(
col1 INT AUTO_INCREMENT PRIMARY KEY,
col2 VARCHAR(200)
) ENGINE=INNODB;
SQL;
$connection->exec($query);
// Seed in batches of 10 000 rows per INSERT statement to keep setup fast.
for ($i = 0; $i < $nbDataToInsert; $i += 10000) {
    $query = "INSERT INTO test (col2) VALUES ('INSERTING $i - 0')";
    for ($ii = 1; $ii < 10000; $ii++) {
        $query .= ", ('PART OF PACKAGE $i - $ii')";
    }
    $connection->exec($query);
}
// ################
// Run tests
// ################
echo "Step 2: Generate data for optimized pagination.\n";
$csv = fopen('pagination_optimized.csv', 'w+');
fputcsv($csv, ['page', 'time_to_load']);
$startId = 0; // Last primary-key value read; the next page seeks past it.
$total = 0;
for ($page = 0; $page < $nbDataToInsert / $pageSize; $page++) {
    $startTime = microtime(true);
    // Keyset pagination: seek on the indexed PK instead of scanning an OFFSET.
    $query = "SELECT * FROM test WHERE col1 > $startId ORDER BY col1 ASC LIMIT $pageSize";
    $query = $connection->query($query);
    while ($result = $query->fetch(PDO::FETCH_ASSOC)) {
        $startId = $result['col1'];
    }
    $time = microtime(true) - $startTime; // Seconds (float).
    $total += $time;
    if ($page % $writeAtEach == 0) {
        echo "\tPage: $page\n";
        fputcsv($csv, [$page, (int) ($time * 1000)]);
    }
    // $stopAt is a millisecond threshold, so convert before comparing.
    if (($time * 1000) > $stopAt) {
        echo "Page too slow, stopping process from here on.\n";
        break;
    }
}
fputcsv($csv, ["total", (int) ($total * 1000)]);
echo "Step 3: Generate data for classic pagination.\n";
$csv = fopen('pagination_classic.csv', 'w+');
fputcsv($csv, ['page', 'time_to_load']);
$total = 0;
for ($page = 0; $page < $nbDataToInsert / $pageSize; $page++) {
    $startTime = microtime(true);
    // Classic OFFSET pagination: MySQL must read and discard $offset rows,
    // so later pages get progressively slower.
    $offset = $page * $pageSize;
    $querysql = "SELECT * FROM test ORDER BY col1 ASC LIMIT $offset, $pageSize";
    $query = $connection->query($querysql);
    while ($result = $query->fetch(PDO::FETCH_ASSOC)) {
        // Do nothing, just reading the data.
    }
    $time = microtime(true) - $startTime; // Seconds (float).
    $total += $time;
    if ($page % $writeAtEach == 0) {
        echo "\tPage: $page\n";
        fputcsv($csv, [$page, (int) ($time * 1000)]);
    }
    // BUG FIX: $time is in seconds but $stopAt is in milliseconds; the original
    // compared raw seconds, so this test effectively never stopped early.
    if (($time * 1000) > $stopAt) {
        echo "Page too slow, stopping process from here on.\n";
        break;
    }
}
fputcsv($csv, ["total", (int) ($total * 1000)]);
// BUG FIX: label was a duplicate "Step 3"; this is the fourth step.
echo "Step 4: Generate data for wrong pagination.\n";
$csv = fopen('pagination_wrong.csv', 'w+');
fputcsv($csv, ['page', 'time_to_load']);
$total = 0;
for ($page = 0; $page < $nbDataToInsert / $pageSize; $page++) {
    $startTime = microtime(true);
    // "Wrong" pagination: OFFSET with no ORDER BY, so row order is not
    // guaranteed to be stable between pages.
    $offset = $page * $pageSize;
    $querysql = "SELECT * FROM test LIMIT $offset, $pageSize";
    $query = $connection->query($querysql);
    while ($result = $query->fetch(PDO::FETCH_ASSOC)) {
        // Do nothing, just reading the data.
    }
    $time = microtime(true) - $startTime; // Seconds (float).
    $total += $time;
    if ($page % $writeAtEach == 0) {
        echo "\tPage: $page\n";
        fputcsv($csv, [$page, (int) ($time * 1000)]);
    }
    // BUG FIX: $time is in seconds but $stopAt is in milliseconds; the original
    // compared raw seconds, so this test effectively never stopped early.
    if (($time * 1000) > $stopAt) {
        echo "Page too slow, stopping process from here on.\n";
        break;
    }
}
fputcsv($csv, ["total", (int) ($total * 1000)]);
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment