|
/* Pull Properties db files (objects_attrs / objects_ids / objects_vals / objects_avs / objects_offs, all .json.gz) into a MS-SQL-Server database
|
|
|
Example usage: |
|
1. copy your 5 files (objects_attrs.json.gz, objects_ids.json.gz, objects_vals.json.gz, objects_avs.json.gz, objects_offs.json.gz) into the 'svf' folder
|
2. > npm install @frangiskos/mssql    (readline is a built-in Node module; no install needed)
|
3. > node import_mssql.js pathTo/svf/output/0 myNewDatabase |
|
*/ |
|
|
|
const fs = require('fs'); |
|
const path = require('path'); |
|
const zlib = require('zlib'); |
|
const readline = require('readline'); |
|
const { sql } = require('@frangiskos/mssql'); |
|
const { env } = require('process'); |
|
|
|
/**
 * Opens a file as a line-by-line async iterator.
 * Files ending in `.gz` (case-insensitive) are transparently gunzipped.
 *
 * @param {string} filePath - path to a plain-text or gzipped text file
 * @returns {readline.Interface} async-iterable over the file's lines
 */
function readFile(filePath) {
    const raw = fs.createReadStream(filePath);
    // Insert a decompression stage only for gzipped input.
    const input = /\.gz$/i.test(filePath) ? raw.pipe(zlib.createUnzip()) : raw;
    // crlfDelay: Infinity treats \r\n as a single line break.
    return readline.createInterface({ input, crlfDelay: Infinity });
}
|
|
|
class importDB {

    /**
     * Connects to MS-SQL (credentials read from env.server / env.user /
     * env.password), then (re)creates the four property tables plus the
     * `properties` convenience view. Reruns are idempotent thanks to the
     * DROP ... IF EXISTS statements.
     *
     * @param {string} szFolderPath - folder containing the objects_*.json.gz files
     * @param {string} szDatabase - target database name
     */
    async initDatabase(szFolderPath, szDatabase) {
        const sqlConfig = {
            server: env.server, //"database-1.c3efranvulke.us-west-2.rds.amazonaws.com",
            user: env.user, //"adminxx",
            password: env.password, //'Password1',
            database: szDatabase,
            requestTimeout: 60000,
            encrypt: false
        };
        await sql.init(sqlConfig);
        // Remember the input folder for the load methods below.
        this.szFolderPath = szFolderPath;
        await sql.query(`DROP TABLE IF EXISTS _objects_eav; CREATE TABLE _objects_eav (id int PRIMARY KEY, entity_id INTEGER, attribute_id INTEGER, value_id INTEGER);`);
        await sql.query(`DROP TABLE IF EXISTS _objects_id; CREATE TABLE _objects_id (id int IDENTITY(1,1) PRIMARY KEY, external_id TEXT, viewable_id TEXT);`);
        await sql.query(`DROP TABLE IF EXISTS _objects_attr; CREATE TABLE _objects_attr (id int IDENTITY(1,1) PRIMARY KEY, name TEXT, category TEXT, data_type INTEGER, data_type_context TEXT, description TEXT, display_name TEXT, flags INTEGER, display_precision INTEGER);`);
        await sql.query(`DROP TABLE IF EXISTS _objects_val; CREATE TABLE _objects_val (id int IDENTITY(1,1) PRIMARY KEY, value TEXT);`);
        await sql.query(`DROP VIEW IF EXISTS properties;`);
        await sql.query(`
CREATE VIEW properties AS SELECT ids.id AS dbid, attrs.category AS category, attrs.name AS name, vals.value AS value
FROM _objects_eav eav
LEFT JOIN _objects_id ids ON ids.id = eav.entity_id
LEFT JOIN _objects_attr attrs ON attrs.id = eav.attribute_id
LEFT JOIN _objects_val vals ON vals.id = eav.value_id
        `);
    }

    /**
     * Streams one gzipped JSON-array file line by line, builds one row object
     * per array element, and bulk-inserts them into `szTable`.
     *
     * @param {string} szTable - destination table name
     * @param {string[]} szInputArr - column names, in the order values appear per line
     * @param {string} filename - file inside this.szFolderPath, e.g. 'objects_vals.json.gz'
     */
    async bulkInsertTable(szTable, szInputArr, filename) {
        // FIX: original logged a broken `$(unknown)` placeholder instead of the filename.
        console.log(`loading ${filename}`);
        const lineReader = readFile(path.join(this.szFolderPath, filename));

        // prepare entire list into memory, one large JSON array
        const megaList = [];
        for await (const line of lineReader) {
            // Skip the "[0," sentinel header and any line that is not a
            // ","-terminated array element (e.g. the closing bracket).
            if (line === "[0," || line.slice(-1) !== ",") continue;
            const obj = {};
            if (szInputArr.length === 1) {
                // Single column: strip the surrounding quotes and trailing comma.
                obj[szInputArr[0]] = line.slice(1, -2);
            } else {
                // Multi-column: each line is a JSON array of values, comma-terminated.
                const vals = JSON.parse(line.slice(0, -1));
                szInputArr.forEach((col, i) => {
                    obj[col] = vals[i];
                });
            }
            megaList.push(obj);
        }
        console.log(`Table: ${szTable} - bulk inserting ${megaList.length} items`);
        await sql.functions.bulkInsert(szTable, megaList);
    }

    /**
     * Builds the entity-attribute-value rows from the offsets/avs file pair:
     * objects_offs.json.gz holds, per entity, the start offset (in pairs) into
     * the flat objects_avs.json.gz array of [attribute_id, value_id] pairs.
     *
     * @param {string} szTable - destination table name (normally '_objects_eav')
     * @param {string[]} params - column names (unused; kept for caller compatibility)
     */
    async addEAVTable(szTable, params) {
        console.log(`loading objects_avs & objects_offs.json.gz...`);

        const read = (filename) => JSON.parse(zlib.gunzipSync(fs.readFileSync(path.join(this.szFolderPath, filename))).toString());
        const pdb = {
            _offsets: read('objects_offs.json.gz'),
            _avs: read('objects_avs.json.gz'),
        };
        let eavIdx = 0;
        const megaList = [];
        // Entity 0 is a sentinel, so offsets start at index 1.
        for (let i = 1, len = pdb._offsets.length; i < len; i++) {
            // Offsets count pairs, hence the *2 into the flat avs array; the
            // last entity's page runs to the end of the array.
            const page = pdb._avs.slice(pdb._offsets[i] * 2, i < len - 1 ? pdb._offsets[i + 1] * 2 : pdb._avs.length);
            if (page.length === 0) {
                continue;
            }
            for (let j = 0; j < page.length; j += 2) {
                megaList.push({
                    id: eavIdx + 1,
                    entity_id: i,
                    attribute_id: page[j],
                    value_id: page[j + 1]
                });
                eavIdx++;
            }
        }
        console.log(`Table: ${szTable} - bulk inserting ${megaList.length} items`);
        await sql.functions.bulkInsert(szTable, megaList);
    }
}
|
|
|
|
|
/**
 * Full import pipeline: creates the schema, loads the three simple tables
 * from their gzipped JSON files, then derives the EAV table from the
 * offsets/avs pair.
 *
 * @param {string} szFolderPath - folder containing the objects_*.json.gz files
 * @param {string} szDatabase - target database name
 */
async function convert(szFolderPath = `svf/output/0`, szDatabase = "Testdb") {
    const importer = new importDB();
    await importer.initDatabase(szFolderPath, szDatabase);

    // [table, columns, source file] — loaded strictly in order.
    const loads = [
        [`_objects_attr`, ["name", "category", "data_type", "data_type_context", "description", "display_name", "flags", "display_precision"], 'objects_attrs.json.gz'],
        [`_objects_id`, ["external_id"], 'objects_ids.json.gz'],
        [`_objects_val`, ["value"], 'objects_vals.json.gz'],
    ];
    for (const [table, columns, file] of loads) {
        await importer.bulkInsertTable(table, columns, file);
    }
    await importer.addEAVTable(`_objects_eav`, ["id", "entity_id", "attribute_id", "value_id"]);

    console.log('finished');
}
|
|
|
// CLI entry point: argv[2] = input folder, argv[3] = database name.
// Any failure is logged and terminates the process with a non-zero status.
const [, , folderArg, databaseArg] = process.argv;
convert(folderArg, databaseArg).catch((err) => {
    console.error(err);
    process.exit(1);
});