@crwilcox · Created May 31, 2018
Repro.js script for Spanner
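// Reproduction script for a nodejs-spanner streaming-query issue (issue-180).
// Modes (first CLI arg): `create` provisions the instance, database, and table;
// `insert` loads NUM_ROWS_TO_INSERT_AND_QUERY rows; `repro` runs concurrent
// streaming queries and verifies each row's columns and the total row count;
// `delete` drops the table. An optional second arg sets NUM_ATTEMPTS.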
const assert = require('assert')
const async = require('async')
const pad = require('pad')
const Spanner = require('./src')
const spanner = new Spanner()
const instance = spanner.instance('issue-180-instance')
const database = instance.database('issue-180-database')
const table = database.table('issuetable')
const NUM_ROWS_TO_INSERT_AND_QUERY = 99000
let initialize = false
if (!process.argv[2]) {
  console.log("create, insert, repro, or delete required as arg")
  return
}
const NUM_ATTEMPTS = process.argv[3] || 5
async function init() {
  let mode = process.argv[2]
  switch (mode) {
    case 'create':
      console.log('creating instance')
      await prepareInstance()
      console.log('creating database')
      await prepareDatabase()
      console.log('creating table')
      await prepareTable()
      return
    case 'insert':
      console.log('inserting rows')
      await insertRows()
      return
    case 'repro':
      console.log('repro, NUM_ATTEMPTS=' + NUM_ATTEMPTS + ' time:' + new Date())
      await repro()
      return
    case 'delete':
      await deleteTable()
      return
    default:
      console.log("create, insert, repro, or delete required as arg")
      return
  }
}
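// repro() issues NUM_ATTEMPTS batches of NUM_ATTEMPTS concurrent streaming
// queries via nested async.times, i.e. NUM_ATTEMPTS^2 queries per run
// (25 with the default of 5).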
async function repro() {
  // return new Promise((resolve, reject) => {
  //   runQuery(err => {
  //     if (err) reject(err)
  //     console.log('Query was successful')
  //     resolve();
  //   });
  // });
  let startTime = new Date()
  async.times(NUM_ATTEMPTS, runQueryBatches, err => {
    if (err) throw err
    console.log('Query was successful EndTime:' + new Date() +
      ' Elapsed: ' + Math.abs(new Date() - startTime))
  })

  function runQueryBatches(_, callback) {
    async.times(NUM_ATTEMPTS, runQuery, callback)
  }

  // in original impl:
  /*
  no resume token
  no resume token
  no resume token
  assert.js:42
  throw new errors.AssertionError({
  ^

  AssertionError [ERR_ASSERTION]: [ 'field0', 'field1' ] deepEqual [ 'field0', 'field1', 'field2', 'field3', 'recordId' ]
      at DestroyableTransform.database.runStream.on.on (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/issue-180.js:49:16)
      at emitOne (events.js:116:13)
      at DestroyableTransform.emit (events.js:211:7)
      at addChunk (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/node_modules/readable-stream/lib/_stream_readable.js:291:12)
      at readableAddChunk (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/node_modules/readable-stream/lib/_stream_readable.js:278:11)
      at DestroyableTransform.Readable.push (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/node_modules/readable-stream/lib/_stream_readable.js:245:10)
      at DestroyableTransform.Transform.push (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/node_modules/readable-stream/lib/_stream_transform.js:148:32)
      at Immediate.loopyloop [as _onImmediate] (/Users/crwilcox/workspace/google-cloud-node/nodejs-spanner/node_modules/split-array-stream/build/src/index.js:55:36)
      at runCallback (timers.js:794:20)
      at tryOnImmediate (timers.js:752:5)
  */

  // Streams the query and checks every row's column set and values. Per-row
  // assertion failures are logged rather than thrown so the stream is fully
  // consumed; a wrong total row count still throws in the 'end' handler.
  function runQuery(_, callback) {
    const query = {
      sql: 'SELECT `root`.`account_created_on` as `field0`, `root`.`Short_Text` as `field1`, `root`.`Short_Text1` as `field2`, `root`.`account_CID` as `field3`, `root`.`recordId` as `recordId` FROM `issuetable` AS root LIMIT ' + NUM_ROWS_TO_INSERT_AND_QUERY,
      json: true,
    }
    let numRowsReceived = 0
    let rows = []
    database.runStream(query)
      .on('error', callback)
      .on('data', (data) => {
        //rows = rows.concat(data)
        let id = data.recordId
        // if(id.lastIndexOf('9196') > -1){
        //   console.log('here')
        // }
        try {
          assert.deepEqual(Object.keys(data), ["field0", "field1", "field2", "field3", "recordId"])
          assert.equal(data.field0, 'account_created_on_' + id)
          assert.equal(data.field1, 'Short_Text_' + id)
          assert.equal(data.field2, 'Short_Text1_' + id)
          assert.equal(data.field3, 'account_CID_' + id)
        } catch (err) {
          console.log(err)
        }
        if (id != numRowsReceived) {
          console.log("Expected Id to match numRows: " + id + ":" + numRowsReceived)
        }
        //console.log(data.recordId)
        numRowsReceived++
      })
      .on('end', () => {
        assert.strictEqual(numRowsReceived, NUM_ROWS_TO_INSERT_AND_QUERY)
        callback()
      })
  }
}
init()
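// The prepare* helpers below treat gRPC error code 6 (ALREADY_EXISTS) as
// success so `node repro.js create` can be re-run against existing resources.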
function prepareInstance() {
  return new Promise((resolve, reject) => {
    instance.create({
      config: 'regional-us-central1',
      nodes: 1,
    }, (err, instance, operation) => {
      if (err) {
        if (err.code === 6) {
          resolve()
        } else {
          reject(err)
        }
      } else {
        resolve(operation.promise())
      }
    })
  })
}
function prepareDatabase() {
  return new Promise((resolve, reject) => {
    database.create(err => {
      if (!err || (err && err.code === 6)) {
        resolve()
      } else {
        reject(err)
      }
    })
  })
}
function prepareTable() {
  const schema = `
    CREATE TABLE issuetable (
      recordId STRING(36) NOT NULL,
      account_CID STRING(255),
      account_created_on STRING(1024),
      account_created_on_unixTimestamp FLOAT64,
      account_dashboard_tabs STRING(255),
      account_ok_tabs STRING(255),
      account_summary_content_tabs STRING(255),
      account_to_creator STRING(36),
      account_to_customer_admin STRING(36),
      account_view_tabs STRING(255),
      Address STRING(1024),
      Address1 STRING(1024),
      Address1_city STRING(255),
      Address1_postalcode STRING(255),
      Address1_provinceterritory STRING(255),
      Address_city STRING(255),
      Address_postalcode STRING(255),
      Address_provinceterritory STRING(255),
      current_document_number FLOAT64,
      customer_content_tabs STRING(255),
      Email STRING(255),
      fund_to_account STRING(36),
      FundsTab_AccountSubNavigation STRING(255),
      Number FLOAT64,
      Number1 FLOAT64,
      Number_of_Payments FLOAT64,
      payment_plan_to_account STRING(36),
      Phone_Number STRING(1024),
      Phone_Number1 STRING(1024),
      requested_payment_plan_to_account STRING(36),
      RequestRelationshipCID STRING(255),
      RequestRelationshipContentTab STRING(255),
      Selection_Field___Static STRING(MAX),
      Short_Text STRING(255),
      Short_Text1 STRING(255),
      Short_Text4 STRING(255),
      upload_customer_csv STRING(1024),
      upload_customer_csv_name STRING(255),
      user_to_accounts STRING(36),
    ) PRIMARY KEY (recordId)`
  return new Promise((resolve, reject) => {
    table.create(schema, (err, tableName, operation) => {
      if (err) {
        if (err.code === 6) {
          resolve()
        } else {
          reject(err)
        }
      } else {
        operation.promise().then(resolve)
      }
    })
  })
}
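// insertRows() splits the load into table.insert() batches of numRowsPerAPICall
// rows; ids are left-padded to width 10 with pad() so that lexicographic
// recordId order matches numeric insertion order, which the id-vs-count check
// in runQuery implicitly assumes.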
function insertRows() {
  function insertBatch(n, count) {
    const rows = []
    for (var i = n; i < n + count; i++) {
      // let id = Math.round(Date.now() * Math.random())
      let id = pad(10, i)
      rows.push({
        account_created_on: 'account_created_on_' + id,
        Short_Text: 'Short_Text_' + id,
        Short_Text1: 'Short_Text1_' + id,
        account_CID: 'account_CID_' + id,
        recordId: id
      })
    }
    return table.insert(rows)
  }

  const numRowsPerAPICall = 3000
  const apiCallPromises = []
  for (var i = 0; i < NUM_ROWS_TO_INSERT_AND_QUERY; i += numRowsPerAPICall) {
    apiCallPromises.push(insertBatch(i, numRowsPerAPICall))
  }
  return Promise.all(apiCallPromises)
}
async function deleteTable() {
  return new Promise((resolve, reject) => {
    table.delete((err, operation) => {
      if (!err) {
        operation.promise().then(resolve)
      } else {
        reject(err)
      }
    })
  })
}
crwilcox commented May 31, 2018

Run once:

node repro.js create
node repro.js insert

Run:

node repro.js repro

If you want to clean up resources:

node repro.js delete

I typically run this in a while loop:

while :
do
  node repro.js repro
done
