@pubkey · Created September 20, 2023 15:10
/**
 * Copy this file to the root of the RxDB repository
 * so that the import paths are correct.
 */
import {
    createRxDatabase,
    randomCouchString,
    requestIdlePromise
} from './dist/lib/index.js';
import * as assert from 'assert';
import { wait, randomString, randomNumber } from 'async-test-util';
import { getRxStorageMemory } from './dist/lib/plugins/storage-memory/index.js';
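/**
 * Test schema with nested objects, an array field,
 * several single-field indexes and one compound index.
 */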
const schema = {
    title: 'averageSchema_' + randomString(5), // randomisation used so hash differs
    version: 0,
    primaryKey: 'id',
    type: 'object',
    keyCompression: false,
    properties: {
        id: {
            type: 'string',
            maxLength: 100
        },
        var1: {
            type: 'string',
            maxLength: 100
        },
        var2: {
            type: 'number',
            minimum: 0,
            maximum: 1000000,
            multipleOf: 1
        },
        deep: {
            type: 'object',
            properties: {
                deep1: {
                    type: 'string',
                    maxLength: 100
                },
                deep2: {
                    type: 'string'
                }
            }
        },
        list: {
            type: 'array',
            items: {
                type: 'object',
                properties: {
                    deep1: {
                        type: 'string'
                    },
                    deep2: {
                        type: 'string'
                    }
                }
            }
        }
    },
    required: [
        'id'
    ],
    indexes: [
        'var1',
        'var2',
        'deep.deep1',
        // one compound index
        [
            'var2',
            'var1'
        ]
    ],
    sharding: {
        shards: 6,
        mode: 'collection'
    }
};
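/**
 * Creates a single random document that matches the schema.
 * Pass a partial object to overwrite specific fields.
 */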
function getDocData(
    partial = {}
) {
    return Object.assign(
        {},
        {
            id: randomString(12),
            var1: randomString(12),
            var2: randomNumber(100, 50000),
            deep: {
                deep1: randomString(5),
                deep2: randomString(8)
            },
            list: new Array(5).fill(0).map(() => ({
                deep1: randomString(5),
                deep2: randomString(8)
            }))
        },
        partial
    );
}
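/**
 * Runs the benchmark: for each run it creates a fresh database,
 * adds collections, inserts documents, queries them by id
 * and records the elapsed time of each step.
 */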
async function run() {
    // wait a bit to ensure everything is loaded
    await wait(2000);
    const runs = 40;
    const perfStorage = {
        description: 'memory', // label used in the result output below
        storage: getRxStorageMemory()
    };
    const totalTimes = {};
    const collectionsAmount = 4;
    const docsAmount = 1200;
    const parallelQueryAmount = 4;
    const insertBatches = docsAmount / 200;
    let runsDone = 0;
    while (runsDone < runs) {
        console.log('runsDone: ' + runsDone + ' of ' + runs);
        runsDone++;
        let time = performance.now();
        /**
         * Stores the time elapsed since the last call under the given flag
         * and resets the timer. Called without a flag it only resets the timer.
         */
        const updateTime = (flag) => {
            if (!flag) {
                time = performance.now();
                return;
            }
            const diff = performance.now() - time;
            if (!totalTimes[flag]) {
                totalTimes[flag] = [diff];
            } else {
                totalTimes[flag].push(diff);
            }
            time = performance.now();
        };
        await awaitBetweenTest();
        updateTime();
        // create database
        const db = await createRxDatabase({
            name: 'test-db-performance-' + randomCouchString(10),
            eventReduce: true,
            /**
             * A RxStorage implementation (like LokiJS)
             * might need a full leader election cycle to be usable.
             * So we disable multiInstance here because it would make no sense
             * to measure the leader election time instead of the database
             * creation time.
             */
            multiInstance: false,
            storage: perfStorage.storage
        });
        // create collections
        const collectionData = {};
        let firstCollectionName = '';
        new Array(collectionsAmount)
            .fill(0)
            .forEach((_v, idx) => {
                const name = randomCouchString(10) + '_' + idx;
                if (!firstCollectionName) {
                    firstCollectionName = name;
                }
                collectionData[name] = {
                    schema: schema,
                    statics: {}
                };
            });
        const collections = await db.addCollections(collectionData);
        const collection = collections[firstCollectionName];
        await collection.insert(getDocData());
        // measures from database creation through collection setup until the first insert has finished
        updateTime('time-to-first-insert');
        await awaitBetweenTest();
        const docIds = [];
        // insert documents
        const docsData = new Array(docsAmount)
            .fill(0)
            .map((_v, idx) => {
                const data = getDocData({
                    var1: (idx % 2) + '',
                    var2: idx % parallelQueryAmount
                });
                docIds.push(data.id);
                return data;
            });
        await collection.bulkInsert(docsData);
        await awaitBetweenTest();
        /**
         * Find by id.
         * Here we run the query against the storage instance because
         * collection.findByIds() would just return the documents
         * from the cache.
         */
        updateTime();
        const idsResult = await collection.storageInstance.findDocumentsById(docIds, false);
        updateTime('find-by-ids');
        assert.strictEqual(Object.keys(idsResult).length, docsAmount);
        await awaitBetweenTest();
        await db.remove();
    }
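    // aggregate the measured times and log the averaged results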
    const timeToLog = {
        description: perfStorage.description,
        collectionsAmount,
        docsAmount
    };
    Object.entries(totalTimes).forEach(([key, times]) => {
        timeToLog[key] = roundToTwo(averageOfTimeValues(times, 90));
    });
    console.log('Performance test for ' + perfStorage.description);
    console.log(JSON.stringify(timeToLog, null, 4));
    // process.exit();
}
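/**
 * Returns the average of the given time values,
 * ignoring the highest X percent to strip outliers.
 */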
export function averageOfTimeValues(
    times,
    stripHighestXPercent
) {
    times = times.sort((a, b) => a - b);
    const stripAmount = Math.floor(times.length * (stripHighestXPercent * 0.01));
    const useNumbers = times.slice(0, times.length - stripAmount);
    let total = 0;
    useNumbers.forEach(nr => total = total + nr);
    return total / useNumbers.length;
}
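/**
 * Rounds a number to two decimal places using exponent-notation
 * strings to avoid floating point drift from multiplying by 100.
 */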
function roundToTwo(num) {
    return +(Math.round(num + 'e+2') + 'e-2');
}
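/**
 * Waits between test steps so that pending background work
 * does not bleed into the next measurement.
 */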
async function awaitBetweenTest() {
    await requestIdlePromise();
    await wait(100);
    await requestIdlePromise();
}
run();