Skip to content

Instantly share code, notes, and snippets.

@mr-pascal
Last active May 28, 2023 16:53
Show Gist options
  • Save mr-pascal/93d351e01b6d1c94ced23d6f9b036816 to your computer and use it in GitHub Desktop.
// Bigtable client library. Table, Instance, Row and RawFilter are pulled in
// for the JSDoc type annotations used throughout this script.
const { Bigtable, Table, Instance, Row, RawFilter } = require('@google-cloud/bigtable');
// -- MAKE SURE TO CHANGE THIS TO YOUR NEEDS!! --
const projectId = 'YOUR_GCP_PROJECT';
const keyFilename = 'YOUR_KEY_FILE.json';
// ----------------------------------------------
// Create CBT client
// Authenticates against `projectId` using the service-account key file above.
const bigtable = new Bigtable({
keyFilename,
projectId
});
// -- Defining some constants ------
// IDs for the demo instance/cluster/table created (and deleted) by main().
const INSTANCE_ID = 'my-instance';
const CLUSTER_ID = 'my-cluster';
const TABLE_ID = 'my-table';
const COLUMN_FAMILY_ID_1 = 'CF_1';
const COLUMN_FAMILY_ID_2 = 'CF_2';
// NOTE(review): the column qualifier shares the value 'CF_1' with the first
// column family name — presumably intentional for this demo, but confirm it
// is not a copy-paste typo.
const COLUMN_QUALIFIER_1 = 'CF_1';
// ---------------------------------
/**
 * Creates a table with two column families: CF_1 keeps up to 2 cell
 * versions per column, CF_2 keeps only the latest.
 *
 * @param {Instance} instance Bigtable instance
 * @param {string} tableId ID of the table to create
 * @returns {Promise<Table>} The created table
 */
const createTable = async (instance, tableId) => {
  const table = instance.table(tableId);
  const tableOptions = {
    families: [
      {
        name: COLUMN_FAMILY_ID_1,
        // Keep the two most recent cell versions (needed by the read demo).
        rule: {
          versions: 2,
        },
      },
      {
        name: COLUMN_FAMILY_ID_2,
        rule: {
          versions: 1,
        },
      },
    ],
  };
  const [createdTable] = await table.create(tableOptions);
  // BUG FIX: the original logged "Created table" before awaiting table.create(),
  // so it reported success even if creation later failed.
  console.log(`Created table: '${tableId}'`);
  return createdTable;
};
/**
 * Fetches a single row by its key, applying the given filter.
 *
 * @param {Table} table Table to read from
 * @param {string} rowKey Key of the row to fetch
 * @param {RawFilter} filter Filter applied to the read
 * @returns {Promise<Row>} The matching row
 */
const getRow = async (table, rowKey, filter) => {
  const response = await table.row(rowKey).get({ filter });
  return response[0];
};
/**
 * Reads all rows whose keys fall in the range [start, end], applying the
 * given filter.
 *
 * @param {Table} table Table to read from
 * @param {string} start First row key of the range (inclusive)
 * @param {string} end Last row key of the range (inclusive)
 * @param {RawFilter} filter Filter applied to the read
 * @returns {Promise<Array<Row>>} Rows within the range
 */
const getRows = async (table, start, end, filter) => {
  const options = {
    filter,
    ranges: [{ start, end }],
  };
  const [matchedRows] = await table.getRows(options);
  return matchedRows;
};
/**
 * Reads rows via the streaming API, collecting them into an array.
 *
 * @param {Table} table Table to read from
 * @param {RawFilter} filter Filter applied to the read
 * @returns {Promise<Array<Row>>} All rows emitted by the stream
 */
const getRowsStreaming = (table, filter) => {
  return new Promise((resolve, reject) => {
    const rows = [];
    table
      .createReadStream({ filter })
      .on('error', (err) => {
        // BUG FIX: the original handler logged an undefined variable `error`
        // (a ReferenceError) and never settled the promise, leaving callers
        // awaiting forever. Propagate the stream error instead.
        reject(err);
      })
      .on('data', (row) => {
        rows.push(row);
      })
      .on('end', () => {
        // All rows retrieved.
        resolve(rows);
      });
  });
};
/**
 * Prints a row's key and every cell value stored under
 * CF_1 / COLUMN_QUALIFIER_1 to stdout.
 *
 * @param {Row} row Row to print
 */
const printSimpleRow = (row) => {
  const cells = row.data[COLUMN_FAMILY_ID_1][COLUMN_QUALIFIER_1];
  const values = cells.map((cell) => cell.value);
  console.log(`\tKey: ${row.id} | Values: ${JSON.stringify(values)}`);
};
/**
 * Demonstrates three read patterns against the demo table: a single-row
 * lookup, a key-range scan, and a full streaming scan.
 *
 * @param {Table} table Table to read from
 */
const readData = async (table) => {
  // Restrict every column to its two most recent cell versions.
  const filter = [
    {
      column: {
        cellLimit: 2,
      },
    },
  ];

  console.log('\nReading a single row by row key');
  const singleRow = await getRow(table, 'e', filter);
  printSimpleRow(singleRow);
  /** Output:
  Key: e | Value: [10,5]
  */

  const range = ['b', 'd'];
  console.log(`\nReading the table from '${range[0]}' to '${range[1]}'`);
  const rangeRows = await getRows(table, range[0], range[1], filter);
  rangeRows.forEach(printSimpleRow);
  /** Output:
  Key: b | Value: [2]
  Key: c | Value: [3]
  Key: d | Value: [4]
  */

  console.log('\nReading the entire table');
  const streamedRows = await getRowsStreaming(table, filter);
  streamedRows.forEach(printSimpleRow);
  /** Output:
  Key: a | Value: [1]
  Key: b | Value: [2]
  Key: c | Value: [3]
  Key: d | Value: [4]
  Key: e | Value: [10,5]
  */
};
/**
 * Builds a Bigtable insert entry from a { key, value } pair, storing the
 * value under CF_1 / COLUMN_QUALIFIER_1 with the current timestamp.
 *
 * @param {Object} el
 * @param {string} el.key
 * @param {number} el.value
 * @returns {Object} Entry in the shape expected by Table#insert
 */
const createSimpleRow = (el) => {
  const cell = {
    timestamp: new Date(),
    value: el.value,
  };
  return {
    key: el.key,
    data: {
      [COLUMN_FAMILY_ID_1]: {
        [COLUMN_QUALIFIER_1]: cell,
      },
    },
  };
};
/**
 * Small helper method to sleep for certain milliseconds.
 *
 * @param {number} ms Milliseconds to wait
 * @returns {Promise<void>} Resolves after the delay
 */
const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
/**
 * Writes an initial batch of five rows, then — after a short pause so the
 * second write gets a later timestamp — writes a second value for row 'e'.
 *
 * @param {Table} table Table to write to
 */
const writeData = async (table) => {
  // Write first set of data
  const firstBatch = [
    { key: 'a', value: 1 },
    { key: 'b', value: 2 },
    { key: 'c', value: 3 },
    { key: 'd', value: 4 },
    { key: 'e', value: 5 },
  ];
  await table.insert(firstBatch.map(createSimpleRow));
  // BUG FIX: the original called sleep(250) without awaiting it, so the
  // promise floated and the intended pause between writes never happened.
  await sleep(250);
  const secondBatch = [{ key: 'e', value: 10 }];
  await table.insert(secondBatch.map(createSimpleRow));
};
/**
 * Creates a new production Bigtable instance backed by a single-node SSD
 * cluster in europe-west3-a.
 *
 * @param {string} instanceId The instance ID
 * @param {string} clusterId The cluster ID
 * @returns {Promise<Instance>} The created Bigtable instance
 */
const createNewInstance = async (instanceId, clusterId) => {
  const instance = bigtable.instance(instanceId);
  console.log('Creating a CBT instance');
  // One SSD-backed node is the minimum footprint for a production instance.
  const clusterConfig = {
    id: clusterId,
    nodes: 1,
    location: 'europe-west3-a',
    storage: 'ssd',
  };
  // Create production instance with given options
  const [prodInstance] = await instance.create({ clusters: [clusterConfig] });
  console.log(`Created Instance: '${prodInstance.id}'`);
  return prodInstance;
};
/**
 * Delete the instance provided by the instanceId.
 *
 * @param {string} instanceId ID of the instance to delete
 */
const deleteInstance = async (instanceId) => {
  const target = bigtable.instance(instanceId);
  console.log('Deleting Instance');
  await target.delete();
  console.log(`Instance deleted: '${target.id}'`);
};
/**
 * Main method doing the following:
 * 1. Create a new Bigtable instance
 * 2. Create a new table
 * 3. Write "normal" data
 * 4. Read the rows created in step 3
 * 5. Delete the created instance
 */
const main = async () => {
  try {
    const instance = await createNewInstance(INSTANCE_ID, CLUSTER_ID);
    const table = await createTable(instance, TABLE_ID);
    // Write + Read - Normal data
    await writeData(table);
    await readData(table);
  } catch (e) {
    console.error(e);
  } finally {
    // We always want to delete the instance again.
    // BUG FIX: the original awaited deleteInstance unguarded — if instance
    // creation itself failed, the cleanup's rejection escaped the catch
    // block above and surfaced as an unhandled promise rejection.
    try {
      await deleteInstance(INSTANCE_ID);
    } catch (cleanupErr) {
      console.error('Failed to delete instance:', cleanupErr);
    }
  }
};
main();
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment