Skip to content

Instantly share code, notes, and snippets.

@maxime-helen-sb
Last active June 18, 2017 06:52
Show Gist options
  • Save maxime-helen-sb/368df5917bb74e3dac625e1f1fbc75c2 to your computer and use it in GitHub Desktop.
Search index compliance tests
'use strict';
const search_index = require("search-index");
const fs = require("fs-extra");
const should = require("should");
const Path = require("path");
/**
 * Convert a Node-style callback function into one that returns a Promise.
 *
 * The wrapper collects its arguments, appends an `(err, ...results)` callback,
 * and invokes `f`. The Promise rejects with `err` when it is truthy, otherwise
 * fulfills with the first result value (extra callback results beyond the
 * first are dropped by Promise semantics, matching the original behavior).
 *
 * NOTE(review): `f` is invoked with `this` undefined, so passing an unbound
 * method (e.g. `promisify(db.concurrentAdd)`) only works when the method does
 * not rely on `this` — consider binding at the call site if it does.
 *
 * @param {Function} f - function whose last parameter is a Node-style callback
 * @returns {Function} promise-returning wrapper around `f`
 */
function promisify(f) {
  return function (...args) {
    return new Promise((fulfill, reject) => {
      args.push((err, ...results) => {
        if (err) {
          return reject(err);
        }
        fulfill(...results);
      });
      f(...args);
    });
  };
}
// Sub-directory (inside tmp_path) holding the search-index keystore files.
const relative_path_to_keystore = ".search_index_keystore";
// Working directory with Windows backslashes normalised to forward slashes.
const dir_path = process.cwd().replace(/\\/g, '/');
// Temporary on-disk location for the index created by the `before` hook.
const tmp_path = Path.join(dir_path, 'tmp', 'search-index');
// `database` is assigned by the before hook; `doc_id` is declared but unused here.
let database, doc_id;
// Reference document; each test insertion deep-copies this object.
const ref_doc = {
main: 'test.assembly'
};
// Create the on-disk search-index before any test runs; any initialisation
// error is forwarded to mocha through done(err).
before('Create index', done => {
  fs.mkdirsSync(tmp_path);
  const options = {
    indexPath: Path.join(tmp_path, relative_path_to_keystore),
    separator: /[\|' ,\-|(\n)]+/
  };
  search_index(options, (err, db) => {
    database = db;
    done(err);
  });
});
// Regression suite: add many identical documents concurrently, then verify a
// single search streams back every one of them.
describe('Search-index', () => {
// Collects the ids streamed back by the search test.
let search_results = [];
const total_docs = 1000; // this number sets how many concurrent calls we will make
// Builds an array of `length` undefined slots that .map() will still visit
// (a bare Array(length) is sparse, so .map() would skip the holes).
const create_empty_array = length => Array.apply(null, Array(length));
it('Concurrent document additions', done => {
const insert_addition_promise = (item, index) => {
const document = JSON.parse(JSON.stringify(ref_doc)); // deep copy
document.count = index;
// NOTE(review): concurrentAdd is passed unbound, so it runs with `this`
// undefined — works only if the method does not rely on `this`.
return promisify(database.concurrentAdd)({}, [document]);
};
// Fire all additions at once and settle when every one has completed.
const promises = Promise.all(create_empty_array(total_docs).map(insert_addition_promise));
promises
.then(() => done())
.catch(done);
});
it('Search concurrently the documents', done => {
let count = 0;
// Stream every document whose `main` field matches the reference document;
// pageSize is raised to total_docs so one page can hold all expected hits.
database.search({
query: {
AND: {
"main": [
ref_doc.main
]
}
},
pageSize: total_docs
})
.on('data', hit => {
count ++;
search_results.push(hit.id);
})
.on('error', done)
.on('end', () => {
// Every concurrently-added document should have been found.
should.equal(count, total_docs);
done();
});
});
});
// Shut down the search-index database handle once all tests complete.
after('Close database', done => {
database.close(done);
});
// Remove the temporary index directory created by the before hook.
after('Cleanup tmp directory', done => {
fs.remove(tmp_path, done);
});
'use strict';
const search_index = require("search-index");
const fs = require("fs-extra");
const should = require("should");
const Path = require("path");
/**
 * Convert a Node-style callback function into one that returns a Promise.
 *
 * The wrapper collects its arguments, appends an `(err, ...results)` callback,
 * and invokes `f`. The Promise rejects with `err` when it is truthy, otherwise
 * fulfills with the first result value (extra callback results beyond the
 * first are dropped by Promise semantics, matching the original behavior).
 *
 * NOTE(review): `f` is invoked with `this` undefined, so passing an unbound
 * method (e.g. `promisify(db.concurrentAdd)`) only works when the method does
 * not rely on `this` — consider binding at the call site if it does.
 *
 * @param {Function} f - function whose last parameter is a Node-style callback
 * @returns {Function} promise-returning wrapper around `f`
 */
function promisify(f) {
  return function (...args) {
    return new Promise((fulfill, reject) => {
      args.push((err, ...results) => {
        if (err) {
          return reject(err);
        }
        fulfill(...results);
      });
      f(...args);
    });
  };
}
// Sub-directory (inside tmp_path) holding the search-index keystore files.
const relative_path_to_keystore = ".search_index_keystore";
// Working directory with Windows backslashes normalised to forward slashes.
const dir_path = process.cwd().replace(/\\/g, '/');
// Temporary on-disk location for the index created by the `before` hook.
const tmp_path = Path.join(dir_path, 'tmp', 'search-index');
// `database` is assigned by the before hook; `doc_id` is declared but unused here.
let database, doc_id;
// Reference document; each test insertion deep-copies this object.
const ref_doc = {
main: 'test.assembly'
};
// Create the on-disk search-index before any test runs; any initialisation
// error is forwarded to mocha through done(err).
before('Create index', done => {
  fs.mkdirsSync(tmp_path);
  const options = {
    indexPath: Path.join(tmp_path, relative_path_to_keystore),
    separator: /[\|' ,\-|(\n)]+/
  };
  search_index(options, (err, db) => {
    database = db;
    done(err);
  });
});
// Full lifecycle suite: concurrently add documents, find them all via search,
// delete them concurrently, then verify the index is empty again.
describe('Search-index', () => {
// Collects the ids streamed back by the search test; consumed by the removal test.
let search_results = [];
const total_docs = 3; // this number sets how many concurrent calls we will make
// Builds an array of `length` undefined slots that .map() will still visit
// (a bare Array(length) is sparse, so .map() would skip the holes).
const create_empty_array = length => Array.apply(null, Array(length));
it('Concurrent document additions', done => {
const insert_addition_promise = (item, index) => {
const document = JSON.parse(JSON.stringify(ref_doc)); // deep copy
document.count = index;
// NOTE(review): concurrentAdd is passed unbound, so it runs with `this`
// undefined — works only if the method does not rely on `this`.
return promisify(database.concurrentAdd)({}, [document]);
};
// Fire all additions at once and settle when every one has completed.
const promises = Promise.all(create_empty_array(total_docs).map(insert_addition_promise));
promises
.then(() => done())
.catch(done);
});
it('Search concurrently the documents', done => {
let count = 0;
// Stream every document whose `main` field matches the reference document;
// pageSize is raised to total_docs so one page can hold all expected hits.
database.search({
query: {
AND: {
"main": [
ref_doc.main
]
}
},
pageSize: total_docs
})
.on('data', hit => {
count ++;
search_results.push(hit.id);
})
.on('error', done)
.on('end', () => {
// Every concurrently-added document should have been found.
should.equal(count, total_docs);
done();
});
});
it('Remove concurrently the documents', done => {
// Delete each id harvested by the previous search test, all in parallel.
const removal_promise = id => promisify(database.del)([id]);
const promises = Promise.all(search_results.map(removal_promise));
promises
.then(() => done())
.catch(done);
});
it('Verify the removed documents are not searchable anymore', done => {
let count = 0;
// Re-run the same query; after deletion it must stream zero hits.
database.search({
query: {
AND: {
"main": [
ref_doc.main
]
}
},
pageSize: total_docs
})
.on('data', () => {
count ++;
})
.on('error', done)
.on('end', () => {
should.equal(count, 0);
done();
});
});
});
// Shut down the search-index database handle once all tests complete.
after('Close database', done => {
database.close(done);
});
// Remove the temporary index directory created by the before hook.
after('Cleanup tmp directory', done => {
fs.remove(tmp_path, done);
});
const test = require('tape')
const SearchIndex = require('search-index');
// Shared across the tape tests below: the live index handle and harvested ids.
var index, ids
// Open (or create) the index under test/sandbox and stash the handle.
test('init search index', function (t) {
  t.plan(1)
  const options = {
    indexPath: 'test/sandbox/376-test'
  }
  SearchIndex(options, function (err, idx) {
    t.error(err)
    index = idx
  })
})
// Fire 1000 overlapping concurrentAdd calls, each inserting one document;
// every callback must report success for the test to pass.
test('add 1000 identical docs using concurrent add', function (t) {
  t.plan(1000)
  for (let i = 0; i < 1000; i++) {
    const doc = {
      main: 'test.assembly',
      count: i
    }
    index.concurrentAdd({}, [doc], function (err) {
      t.error(err)
    })
  }
})
// Collect the id of every indexed document via a streaming search, then
// assert that all 1000 documents came back.
test('harvest ids', function (t) {
  t.comment('get data!')
  t.plan(1)
  ids = []
  const results = index.search({
    pageSize: 1000
  })
  results.on('data', function (doc) {
    ids.push(doc.id)
  })
  results.on('end', function () {
    t.is(ids.length, 1000)
  })
})
// Delete each harvested document individually; every del must succeed.
test('delete docs', function (t) {
  t.plan(1000)
  const onDeleted = function (err) {
    t.error(err)
  }
  for (let i = 0; i < 1000; i++) {
    index.del([ids[i]], onDeleted)
  }
})
// After deletion an unfiltered search must stream zero documents.
// FIX: the original was missing the closing `})` terminating this test()
// call (the gist ends with a stray "-"), leaving the file syntactically
// invalid; the body itself is unchanged.
test('search should return no docs', function (t) {
  t.plan(1)
  let i = 0
  index
    .search()
    .on('data', function (doc) {
      i++
    }).on('end', function () {
      t.is(i, 0)
    })
})
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment