@wallabyway
Last active September 13, 2023 21:07
properties db - convert JSON to MS-SQL-Server, PostgreSQL, or SQLite

Setup

Choose an importer: MS-SQL-Server, SQLite, or PostgreSQL. Follow the setup instructions in the header comment of the corresponding script below, install its dependencies, and run it.

How to use

  1. Get a dbid from the Forge Viewer or Unity, say dbid 3114.
  2. Then run a database query like this...

SQL Examples:

  • select * from properties where dbid=3114;
  • select * from properties where category like '%IFC%';
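To run the same query from Node instead of a SQL shell, here is a minimal sketch against the SQLite output. It assumes the resultsqlite.db filename from the SQLite example below and reuses the sqlite-async package that importer already depends on; propsForDbid is a hypothetical helper, not part of the gist.

// query_properties.js (hypothetical) - print all properties for one dbid
const sqlite3 = require('sqlite-async');

async function propsForDbid(dbid) {
  const db = await sqlite3.open('resultsqlite.db'); // file produced by import_sqlite.js
  const rows = await db.all('SELECT * FROM properties WHERE dbid = ?', [dbid]);
  console.log(rows);
  await db.close();
}

propsForDbid(3114).catch(console.error);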
import_mssql.js

/* Pull the Forge properties db files (objects_ids, objects_vals, objects_attrs, objects_avs, objects_offs - all .json.gz) into an MS-SQL-Server database.
Connection settings are read from the environment variables `server`, `user` and `password`.
Example usage:
1. copy your 5 objects_*.json.gz files into the 'svf' folder
2. > npm install @frangiskos/mssql   (readline and zlib are Node built-ins)
3. > node import_mssql.js pathTo/svf/output/0 myNewDatabase
*/
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const readline = require('readline');
const { sql } = require('@frangiskos/mssql');
const { env } = require('process');

// Stream a (possibly gzipped) JSON file line by line.
function readFile(filePath) {
  let stream = fs.createReadStream(filePath);
  if (/\.gz$/i.test(filePath)) {
    stream = stream.pipe(zlib.createUnzip());
  }
  return readline.createInterface({
    input: stream,
    crlfDelay: Infinity
  });
}
class importDB {
  async initDatabase(szFolderPath, szDatabase) {
    // Connection settings come from the environment, e.g.
    // server=database-1.xxxx.us-west-2.rds.amazonaws.com user=admin password=...
    const sqlConfig = {
      server: env.server,
      user: env.user,
      password: env.password,
      database: szDatabase,
      requestTimeout: 60000,
      encrypt: false
    };
    await sql.init(sqlConfig);
    this.szFolderPath = szFolderPath;
    // Rebuild the four source tables and the flattened 'properties' view.
    await sql.query(`DROP TABLE IF EXISTS _objects_eav; CREATE TABLE _objects_eav (id int PRIMARY KEY, entity_id INTEGER, attribute_id INTEGER, value_id INTEGER);`);
    await sql.query(`DROP TABLE IF EXISTS _objects_id; CREATE TABLE _objects_id (id int IDENTITY(1,1) PRIMARY KEY, external_id TEXT, viewable_id TEXT);`);
    await sql.query(`DROP TABLE IF EXISTS _objects_attr; CREATE TABLE _objects_attr (id int IDENTITY(1,1) PRIMARY KEY, name TEXT, category TEXT, data_type INTEGER, data_type_context TEXT, description TEXT, display_name TEXT, flags INTEGER, display_precision INTEGER);`);
    await sql.query(`DROP TABLE IF EXISTS _objects_val; CREATE TABLE _objects_val (id int IDENTITY(1,1) PRIMARY KEY, value TEXT);`);
    await sql.query(`DROP VIEW IF EXISTS properties;`);
    await sql.query(`
      CREATE VIEW properties AS SELECT ids.id AS dbid, attrs.category AS category, attrs.name AS name, vals.value AS value
      FROM _objects_eav eav
      LEFT JOIN _objects_id ids ON ids.id = eav.entity_id
      LEFT JOIN _objects_attr attrs ON attrs.id = eav.attribute_id
      LEFT JOIN _objects_val vals ON vals.id = eav.value_id
    `);
  }
  async bulkInsertTable(szTable, szInputArr, filename) {
    console.log(`loading ${filename}`);
    const lineReader = readFile(path.join(this.szFolderPath, filename));
    // Build the entire row list in memory, then bulk insert in one call.
    const megaList = [];
    for await (const line of lineReader) {
      // Skip the opening "[0," line (element 0 is a placeholder) and the closing line.
      if ((line == "[0,") || (line.slice(-1) != ",")) continue;
      const obj = {};
      if (szInputArr.length == 1) {
        // Single-column file: each line is one bare JSON value plus a trailing comma.
        obj[szInputArr[0]] = JSON.parse(line.slice(0, -1));
      } else {
        const vals = JSON.parse(line.slice(0, -1));
        szInputArr.forEach((col, i) => {
          obj[col] = vals[i];
        });
      }
      megaList.push(obj);
    }
    console.log(`Table: ${szTable} - bulk inserting ${megaList.length} items`);
    await sql.functions.bulkInsert(szTable, megaList);
  }
  async addEAVTable(szTable) {
    console.log(`loading objects_avs & objects_offs.json.gz...`);
    const read = (filename) => JSON.parse(zlib.gunzipSync(fs.readFileSync(path.join(this.szFolderPath, filename))).toString());
    const pdb = {
      _offsets: read('objects_offs.json.gz'), // one offset per entity (dbid)
      _avs: read('objects_avs.json.gz')       // flat array of attribute_id/value_id pairs
    };
    let eavIdx = 0;
    const megaList = [];
    for (let i = 1, len = pdb._offsets.length; i < len; i++) {
      // Entity i owns the avs pairs between offsets[i]*2 and offsets[i+1]*2.
      const page = pdb._avs.slice(pdb._offsets[i] * 2, i < len - 1 ? pdb._offsets[i + 1] * 2 : pdb._avs.length);
      if (page.length === 0) {
        continue;
      }
      for (let j = 0; j < page.length; j += 2) {
        megaList.push({
          id: ++eavIdx,
          entity_id: i,
          attribute_id: page[j],
          value_id: page[j + 1]
        });
      }
    }
    console.log(`Table: ${szTable} - bulk inserting ${megaList.length} items`);
    await sql.functions.bulkInsert(szTable, megaList);
  }
}
async function convert(szFolderPath = `svf/output/0`, szDatabase = "Testdb") {
  const db = new importDB();
  await db.initDatabase(szFolderPath, szDatabase);
  await db.bulkInsertTable(`_objects_attr`, ["name", "category", "data_type", "data_type_context", "description", "display_name", "flags", "display_precision"], 'objects_attrs.json.gz');
  await db.bulkInsertTable(`_objects_id`, ["external_id"], 'objects_ids.json.gz');
  await db.bulkInsertTable(`_objects_val`, ["value"], 'objects_vals.json.gz');
  await db.addEAVTable(`_objects_eav`);
  console.log('finished');
}
convert(process.argv[2], process.argv[3]).catch(err => {
  console.error(err);
  process.exit(1);
});
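For context on what addEAVTable decodes in all three importers: objects_offs.json.gz holds one offset per entity (dbid), and objects_avs.json.gz holds one flat array of attribute_id/value_id pairs; entity i owns the pairs between offsets[i]*2 and offsets[i+1]*2. A standalone sketch with made-up numbers:

// Illustration only - these offsets/avs values are invented.
const offsets = [0, 0, 2];          // index 0 is a placeholder; dbids start at 1
const avs = [5, 10, 7, 11, 5, 12];  // three (attribute_id, value_id) pairs

for (let i = 1, len = offsets.length; i < len; i++) {
  const end = i < len - 1 ? offsets[i + 1] * 2 : avs.length;
  const page = avs.slice(offsets[i] * 2, end);
  for (let j = 0; j < page.length; j += 2) {
    console.log(`dbid ${i}: attribute ${page[j]} -> value ${page[j + 1]}`);
  }
}
// prints:
// dbid 1: attribute 5 -> value 10
// dbid 1: attribute 7 -> value 11
// dbid 2: attribute 5 -> value 12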
import_postgresql.js

/* Pull the Forge properties db files (objects_ids, objects_vals, objects_attrs, objects_avs, objects_offs - all .json.gz) into a PostgreSQL database.
Example usage:
1. copy your 5 objects_*.json.gz files into the 'svf' folder
2. > npm install pg   (readline and zlib are Node built-ins)
3. > node import_postgresql.js pathTo/svf myDatabase
*/
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const readline = require('readline');
const pg = require('pg');

// Stream a (possibly gzipped) JSON file line by line.
function readFile(filePath) {
  let stream = fs.createReadStream(filePath);
  if (/\.gz$/i.test(filePath)) {
    stream = stream.pipe(zlib.createUnzip());
  }
  return readline.createInterface({
    input: stream,
    crlfDelay: Infinity
  });
}
class importDB {
  async initDatabase(szFolderPath, szDatabase = 'ict-db') {
    // The password (and any other setting) can also come from the
    // standard PG* environment variables, e.g. PGPASSWORD.
    const pool = new pg.Pool({
      user: 'postgres',
      host: 'localhost',
      port: 5432,
      database: szDatabase
    });
    this.pool = pool;
    const db = await pool.connect();
    this.db = db;
    this.szFolderPath = szFolderPath;
    await db.query('BEGIN');
    await db.query('CREATE TABLE IF NOT EXISTS _objects_eav (id INTEGER PRIMARY KEY, entity_id INTEGER, attribute_id INTEGER, value_id INTEGER);');
    await db.query('CREATE TABLE IF NOT EXISTS _objects_id (id SERIAL PRIMARY KEY, external_id TEXT, viewable_id TEXT);');
    await db.query('CREATE TABLE IF NOT EXISTS _objects_attr (id SERIAL PRIMARY KEY, name TEXT, category TEXT, data_type INTEGER, data_type_context TEXT, description TEXT, display_name TEXT, flags INTEGER, display_precision INTEGER);');
    await db.query('CREATE TABLE IF NOT EXISTS _objects_val (id SERIAL PRIMARY KEY, value TEXT);');
    await db.query(`
      CREATE OR REPLACE VIEW properties AS SELECT ids.id AS dbid, attrs.category AS category, attrs.name AS name, vals.value AS value
      FROM _objects_eav eav
      LEFT JOIN _objects_id ids ON ids.id = eav.entity_id
      LEFT JOIN _objects_attr attrs ON attrs.id = eav.attribute_id
      LEFT JOIN _objects_val vals ON vals.id = eav.value_id
    `);
    await db.query('COMMIT');
  }
  async addTable(sql, filename) {
    console.log(`importing ${filename}...`);
    const db = this.db;
    await db.query('BEGIN');
    const lineReader = readFile(path.join(this.szFolderPath, filename));
    for await (const line of lineReader) {
      // Skip the opening "[0," line (element 0 is a placeholder) and the closing line.
      if ((line == "[0,") || (line.slice(-1) != ",")) continue;
      const jsn = JSON.parse(line.slice(0, -1));
      await db.query(sql, (Array.isArray(jsn) ? jsn : [jsn]));
    }
    await db.query('COMMIT');
  }
  async addEAVTable(query) {
    console.log(`importing objects_avs & objects_offs.json.gz...`);
    const db = this.db;
    await db.query('BEGIN');
    const read = (filename) => JSON.parse(zlib.gunzipSync(fs.readFileSync(path.join(this.szFolderPath, filename))).toString());
    const pdb = {
      _offsets: read('objects_offs.json.gz'), // one offset per entity (dbid)
      _avs: read('objects_avs.json.gz')       // flat array of attribute_id/value_id pairs
    };
    let eavIdx = 0;
    for (let i = 1, len = pdb._offsets.length; i < len; i++) {
      // Entity i owns the avs pairs between offsets[i]*2 and offsets[i+1]*2.
      const page = pdb._avs.slice(pdb._offsets[i] * 2, i < len - 1 ? pdb._offsets[i + 1] * 2 : pdb._avs.length);
      if (page.length === 0) {
        continue;
      }
      for (let j = 0; j < page.length; j += 2) {
        await db.query(query, [++eavIdx, i, page[j], page[j + 1]]);
      }
    }
    await db.query('COMMIT');
  }
  // Release the client and drain the pool so the process can exit.
  async close() {
    this.db.release();
    await this.pool.end();
  }
}
async function convert(szFolderPath = `svf`, szDatabase = "ict-db") {
  const db = new importDB();
  await db.initDatabase(szFolderPath, szDatabase);
  await db.addTable(`INSERT INTO _objects_val (value) VALUES ( $1 )`, 'objects_vals.json.gz');
  await db.addTable(`INSERT INTO _objects_id (external_id) VALUES ( $1 )`, 'objects_ids.json.gz');
  await db.addTable(`INSERT INTO _objects_attr (name, category, data_type, data_type_context, description, display_name, flags, display_precision) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8 )`, 'objects_attrs.json.gz');
  await db.addEAVTable(`INSERT INTO _objects_eav (id, entity_id, attribute_id, value_id) VALUES ($1, $2, $3, $4)`);
  await db.close();
  console.log('finished');
}
convert(process.argv[2], process.argv[3]).catch(err => {
  console.error(err);
  process.exit(1);
});
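The PostgreSQL importer issues one INSERT per row inside a single transaction, which is simple but slow on large models. One way to speed it up is batching rows into multi-row INSERT statements; the sketch below is my own, not part of the gist (batchInsert and the 500-row batch size are assumptions):

// Sketch: multi-row INSERTs cut client/server round trips. `db` is a
// connected pg client; `rows` is an array of objects keyed by column name.
async function batchInsert(db, table, columns, rows, batchSize = 500) {
  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = rows.slice(i, i + batchSize);
    const params = [];
    // Array.push returns the new length, which is exactly the 1-based
    // placeholder number PostgreSQL expects ($1, $2, ...).
    const tuples = batch.map(row =>
      '(' + columns.map(col => '$' + params.push(row[col])).join(', ') + ')'
    );
    await db.query(`INSERT INTO ${table} (${columns.join(', ')}) VALUES ${tuples.join(', ')}`, params);
  }
}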
import_sqlite.js

/* Pull the Forge properties db files (objects_ids, objects_vals, objects_attrs, objects_avs, objects_offs - all .json.gz) into a SQLite database.
Example usage:
1. copy your objects_*.json.gz files into 'myJSONfolder'
2. > npm install sqlite-async@1.1.3   (readline and zlib are Node built-ins)
3. > node import_sqlite.js myJSONfolder resultsqlite.db
*/
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');
const readline = require('readline');
const sqlite3 = require('sqlite-async');

// Stream a (possibly gzipped) JSON file line by line.
function readFile(filePath) {
  let stream = fs.createReadStream(filePath);
  if (/\.gz$/i.test(filePath)) {
    stream = stream.pipe(zlib.createUnzip());
  }
  return readline.createInterface({
    input: stream,
    crlfDelay: Infinity
  });
}
class importDB {
  async initDatabase(szFolderPath, szFilenameSQLite) {
    const db = await sqlite3.open(szFilenameSQLite);
    this.db = db;
    this.szFolderPath = szFolderPath;
    // Trade durability for import speed.
    await db.run('PRAGMA journal_mode = off;');
    await db.run('PRAGMA synchronous = off;');
    await db.run('CREATE TABLE _objects_eav (id INTEGER PRIMARY KEY, entity_id INTEGER, attribute_id INTEGER, value_id INTEGER);');
    await db.run('CREATE TABLE _objects_id (id INTEGER PRIMARY KEY, external_id BLOB, viewable_id BLOB);');
    await db.run('CREATE TABLE _objects_attr (id INTEGER PRIMARY KEY, name TEXT, category TEXT, data_type INTEGER, data_type_context TEXT, description TEXT, display_name TEXT, flags INTEGER, display_precision INTEGER);');
    await db.run('CREATE TABLE _objects_val (id INTEGER PRIMARY KEY, value BLOB);');
    await db.run(`
      CREATE VIEW properties AS SELECT ids.id AS dbid, attrs.category AS category, IFNULL(attrs.display_name, attrs.name) AS name, vals.value AS value
      FROM _objects_eav eav
      LEFT JOIN _objects_id ids ON ids.id = eav.entity_id
      LEFT JOIN _objects_attr attrs ON attrs.id = eav.attribute_id
      LEFT JOIN _objects_val vals ON vals.id = eav.value_id
    `);
    /*
    Optional: create these after the import finishes for faster queries
    (see the follow-up sketch after this script).
    await db.run(`
      CREATE INDEX idx_dbid ON _objects_eav (entity_id);
      CREATE INDEX idx_attribute_id_value_id ON _objects_eav (attribute_id, value_id);
    `)
    */
  }
  async addTable(sql, filename) {
    console.log(`importing ${filename}...`);
    const lineReader = readFile(path.join(this.szFolderPath, filename));
    for await (const line of lineReader) {
      // Skip the opening "[0," line (element 0 is a placeholder) and the closing line.
      if ((line == "[0,") || (line.slice(-1) != ",")) continue;
      const jsn = JSON.parse(line.slice(0, -1));
      await this.db.run(sql, jsn);
    }
  }
  async addEAVTable() {
    console.log(`importing objects_avs & objects_offs.json.gz...`);
    const read = (filename) => JSON.parse(zlib.gunzipSync(fs.readFileSync(path.join(this.szFolderPath, filename))).toString());
    const pdb = {
      _offsets: read('objects_offs.json.gz'), // one offset per entity (dbid)
      _avs: read('objects_avs.json.gz')       // flat array of attribute_id/value_id pairs
    };
    let eavIdx = 1;
    for (let i = 1, len = pdb._offsets.length; i < len; i++) {
      // Entity i owns the avs pairs between offsets[i]*2 and offsets[i+1]*2.
      const page = pdb._avs.slice(pdb._offsets[i] * 2, i < len - 1 ? pdb._offsets[i + 1] * 2 : pdb._avs.length);
      if (page.length === 0) {
        continue;
      }
      // One multi-row INSERT per entity; the attribute/value ids are
      // bound from the page array itself.
      const values = [];
      for (let j = 0; j < page.length / 2; j++) {
        values.push(`(${eavIdx++}, ${i}, ?, ?)`);
      }
      await this.db.run(`INSERT INTO _objects_eav VALUES ${values.join(',')};`, page);
    }
  }
}
async function convert(szFolderPath = `svf`, szFilenameSQLite = "sample.db") {
  const db = new importDB();
  await db.initDatabase(szFolderPath, szFilenameSQLite);
  await db.addTable(`INSERT INTO _objects_attr VALUES ( null, ?, ?, ?, ?, ?, ?, ?, ? )`, 'objects_attrs.json.gz');
  await db.addTable(`INSERT INTO _objects_val VALUES ( null, ? )`, 'objects_vals.json.gz');
  await db.addTable(`INSERT INTO _objects_id VALUES ( null, ?, ? )`, 'objects_ids.json.gz');
  await db.addEAVTable();
  console.log('finished');
}

convert(process.argv[2], process.argv[3]).catch(err => {
  console.error(err);
  process.exit(1);
});
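In initDatabase above, the two CREATE INDEX statements are commented out so the bulk inserts stay fast; creating the indexes once, after the import has finished, gives the same query speed-up. A minimal follow-up sketch (the addIndexes helper and the default filename are my own assumptions):

// add_indexes.js (hypothetical) - run after import_sqlite.js completes
const sqlite3 = require('sqlite-async');

async function addIndexes(dbFile) {
  const db = await sqlite3.open(dbFile);
  await db.run('CREATE INDEX IF NOT EXISTS idx_dbid ON _objects_eav (entity_id);');
  await db.run('CREATE INDEX IF NOT EXISTS idx_attribute_id_value_id ON _objects_eav (attribute_id, value_id);');
  await db.close();
}

addIndexes(process.argv[2] || 'resultsqlite.db').catch(console.error);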