Skip to content

Instantly share code, notes, and snippets.

@kyletaylored
Last active November 20, 2020 06:03
Show Gist options
  • Save kyletaylored/b48ede133dced4b88debeab2beecbb63 to your computer and use it in GitHub Desktop.
Save kyletaylored/b48ede133dced4b88debeab2beecbb63 to your computer and use it in GitHub Desktop.
Terminus Site Inventory
node_modules

Small helper script for auditing an organization portfolio on Pantheon using Terminus.

# Install node dependencies (just one for CSV output)
npm i

# Either export the org ID as a shell variable... 
export ORG="ORG_UUID"
node site-inventory.js

# ...or pass it in as an argument
node site-inventory.js ORG_UUID
#!/usr/local/bin/node
const fs = require('fs')
const { execSync } = require('child_process')
const { getSites, processLogs } = require('./utils.js')

// Org UUID comes from the first CLI argument, falling back to $ORG.
const org = process.argv[2] !== undefined ? process.argv[2] : process.env['ORG']
console.log({ org })
const env = 'live'

// Get upstreams: the org's custom upstreams merged with Pantheon core upstreams.
const upstreamFields = 'ID,Machine Name'
const customUpstream = JSON.parse(execSync(`terminus org:upstream:list "${org}" --format json --fields "${upstreamFields}"`))
const coreUpstream = JSON.parse(execSync(`terminus upstream:list --filter type=core --format json --fields "${upstreamFields}"`))
const upstreams = Object.assign({}, customUpstream, coreUpstream)

// Kick off one site-list lookup per upstream, in parallel.
const sitePromises = []
for (const upstream in upstreams) {
  const id = upstreams[upstream].id
  const mac = upstreams[upstream].machine_name
  sitePromises.push(getSites(id, mac, org))
}

// Collect log-processing promises for every paid, unfrozen site.
Promise.all(sitePromises)
  .then((sites) => {
    const logPromises = []
    for (const up of sites) {
      // Skip empty upstream results. The original used `||`, which still
      // evaluated `up.length` when `up` was undefined and threw.
      if (up === undefined) continue
      for (const s in up) {
        const site = up[s]
        if (site.plan_name !== 'Sandbox' && site.frozen === false) {
          logPromises.push(processLogs(site, env))
        }
      }
    }
    // Process all logs.
    return Promise.all(logPromises)
  })
  .then((logs) => {
    console.log(logs)
  })
  .catch((err) => {
    // Surface failures instead of leaving an unhandled rejection.
    console.error('Error processing site logs:', err)
  })
{
"name": "terminus_site_inventory",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@babel/runtime": {
"version": "7.12.5",
"resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.5.tgz",
"integrity": "sha512-plcc+hbExy3McchJCEQG3knOsuh3HH+Prx1P6cLIkET/0dLuQDEnrT+s27Axgc9bqfsmNUNHfscgMUdBpC9xfg==",
"requires": {
"regenerator-runtime": "^0.13.4"
}
},
"async-csv": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/async-csv/-/async-csv-2.1.3.tgz",
"integrity": "sha512-mpsCN+D7mzZeqrlDw7UTPhvDQDlx1i819E9fbKIt8drkgED5FSOlBv3Rk/+sXdevnO2wwlRkVOQ4kdT0AyqPqQ==",
"requires": {
"csv": "^5.1.3"
}
},
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"csv": {
"version": "5.3.2",
"resolved": "https://registry.npmjs.org/csv/-/csv-5.3.2.tgz",
"integrity": "sha512-odDyucr9OgJTdGM2wrMbJXbOkJx3nnUX3Pt8SFOwlAMOpsUQlz1dywvLMXJWX/4Ib0rjfOsaawuuwfI5ucqBGQ==",
"requires": {
"csv-generate": "^3.2.4",
"csv-parse": "^4.8.8",
"csv-stringify": "^5.3.6",
"stream-transform": "^2.0.1"
}
},
"csv-generate": {
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/csv-generate/-/csv-generate-3.2.4.tgz",
"integrity": "sha512-qNM9eqlxd53TWJeGtY1IQPj90b563Zx49eZs8e0uMyEvPgvNVmX1uZDtdzAcflB3PniuH9creAzcFOdyJ9YGvA=="
},
"csv-parse": {
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-4.12.0.tgz",
"integrity": "sha512-wPQl3H79vWLPI8cgKFcQXl0NBgYYEqVnT1i6/So7OjMpsI540oD7p93r3w6fDSyPvwkTepG05F69/7AViX2lXg=="
},
"csv-stringify": {
"version": "5.5.1",
"resolved": "https://registry.npmjs.org/csv-stringify/-/csv-stringify-5.5.1.tgz",
"integrity": "sha512-HM0/86Ks8OwFbaYLd495tqTs1NhscZL52dC4ieKYumy8+nawQYC0xZ63w1NqLf0M148T2YLYqowoImc1giPn0g=="
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"lodash.compact": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/lodash.compact/-/lodash.compact-3.0.1.tgz",
"integrity": "sha1-VAzjg3dFl1gHRx4WtKK6IeclbKU="
},
"merge-files": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/merge-files/-/merge-files-0.1.2.tgz",
"integrity": "sha512-WTvtH6ZwVy1/scvp1M+Re6PVni87QTjpSLAwxh0L+PlYIxc4VGFFpLjvP7jdJ43gaJ5n+RUIriJ6wKqmqvVVmg==",
"requires": {
"multistream": "^2.1.0"
}
},
"mixme": {
"version": "0.3.5",
"resolved": "https://registry.npmjs.org/mixme/-/mixme-0.3.5.tgz",
"integrity": "sha512-SyV9uPETRig5ZmYev0ANfiGeB+g6N2EnqqEfBbCGmmJ6MgZ3E4qv5aPbnHVdZ60KAHHXV+T3sXopdrnIXQdmjQ=="
},
"multistream": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/multistream/-/multistream-2.1.1.tgz",
"integrity": "sha512-xasv76hl6nr1dEy3lPvy7Ej7K/Lx3O/FCvwge8PeVJpciPPoNCbaANcNiBug3IpdvTveZUcAV0DJzdnUDMesNQ==",
"requires": {
"inherits": "^2.0.1",
"readable-stream": "^2.0.5"
}
},
"node-dig-dns": {
"version": "0.2.12",
"resolved": "https://registry.npmjs.org/node-dig-dns/-/node-dig-dns-0.2.12.tgz",
"integrity": "sha512-9JyAW3R7PbuYvmKjYq4yk12VdGGAwssOFEezG1igE9PlH5/3ZKJrKl7Ll90ae3DDyloXAtpuLV3G+5cJREG9UQ==",
"requires": {
"@babel/runtime": "^7.11.2",
"lodash.compact": "^3.0.1"
}
},
"objects-to-csv": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/objects-to-csv/-/objects-to-csv-1.3.6.tgz",
"integrity": "sha512-383eSpS3hmgCksW85KIqBtcbgSW5DDVsCmzLoM6C3q4yzOX2rmtWxF4pbLJ76fz+ufA+4/SwAT4QdaY6IUWmAg==",
"requires": {
"async-csv": "^2.1.3"
}
},
"process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
"integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="
},
"readable-stream": {
"version": "2.3.7",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz",
"integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==",
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"regenerator-runtime": {
"version": "0.13.7",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz",
"integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew=="
},
"safe-buffer": {
"version": "5.1.2",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz",
"integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="
},
"stream-transform": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/stream-transform/-/stream-transform-2.0.2.tgz",
"integrity": "sha512-J+D5jWPF/1oX+r9ZaZvEXFbu7znjxSkbNAHJ9L44bt/tCVuOEWZlDqU9qJk7N2xBU1S+K2DPpSKeR/MucmCA1Q==",
"requires": {
"mixme": "^0.3.1"
}
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "~5.1.0"
}
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
}
}
}
{
"name": "terminus_site_inventory",
"version": "1.0.0",
"description": "Get an inventory of sites, domains, and owner email addresses for an org.",
"main": "site-inventory.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Kyle Taylor <kyle.taylor@pantheon.io>",
"license": "ISC",
"dependencies": {
"merge-files": "^0.1.2",
"node-dig-dns": "^0.2.12",
"objects-to-csv": "^1.3.6"
}
}
#!/usr/local/bin/node
const { execSync } = require('child_process')
const ObjectsToCsv = require('objects-to-csv')
const { getSites, getDomains } = require('./utils.js')

// Org UUID comes from the first CLI argument, falling back to $ORG.
const org = process.argv[2] !== undefined ? process.argv[2] : process.env['ORG']
console.log({ org })

// Get upstreams: the org's custom upstreams merged with Pantheon core upstreams.
const upstreamFields = 'ID,Machine Name'
const customUpstream = JSON.parse(execSync(`terminus org:upstream:list "${org}" --format json --fields "${upstreamFields}"`))
const coreUpstream = JSON.parse(execSync(`terminus upstream:list --filter type=core --format json --fields "${upstreamFields}"`))
const upstreams = Object.assign({}, customUpstream, coreUpstream)

// Get org members keyed by user UUID.
// NOTE(review): `owners` is module-scoped here and is NOT visible to
// utils.js's getSiteOwner, which appears to expect it — confirm intent.
const owners = JSON.parse(execSync(`terminus org:people:list "${org}" --format json`))

// Kick off one site-list lookup per upstream, in parallel.
const sitePromises = []
for (const upstream in upstreams) {
  const id = upstreams[upstream].id
  const mac = upstreams[upstream].machine_name
  sitePromises.push(getSites(id, mac, org))
}

// Resolve custom domains for every site, then write the CSV inventory.
Promise.all(sitePromises)
  .then((sites) => {
    const domainPromises = []
    for (const up of sites) {
      // Skip empty upstream results. The original used `||`, which still
      // evaluated `up.length` when `up` was undefined and threw.
      if (up === undefined) continue
      for (const s in up) {
        domainPromises.push(getDomains(up[s]))
      }
    }
    // Wait for domains to finish processing.
    return Promise.all(domainPromises)
  })
  .then(async (domains) => {
    const csv = new ObjectsToCsv(domains)
    const fileName = `/tmp/${org} Site Inventory.csv`
    // Save to file:
    await csv.toDisk(fileName)
    console.log(`Domain file: ${fileName}`)
  })
  .catch((err) => {
    // Surface failures instead of leaving an unhandled rejection.
    console.error('Error building site inventory:', err)
  })
const { exec, execSync } = require('child_process')
const util = require('util')
const execPromise = util.promisify(exec)
const dig = require('node-dig-dns')
const mergeFiles = require('merge-files')
const fs = require('fs')
/**
* Get a list of sites for each upstream.
* @param {String} id
* @param {String} machine_name
*/
exports.getSites = async (id, machine_name, org) => {
console.log(`Searching for sites in ${machine_name}...`)
let cmd = `terminus org:site:list "${org}" --format json --upstream "${id}"`
let resp = await execPromise(cmd)
let sites = JSON.parse(resp.stdout)
for (site in sites) {
sites[site].upstream = machine_name
// sites[site].owner = await getSiteOwner(sites[site])
}
return sites
}
/**
* Get site owner.
* @param {Object} site
*/
exports.getSiteOwner = async (site) => {
const owner = site.owner
const id = site.id
if (Object.keys(owners).includes(owner)) {
return owners[owner].email
} else {
// Need to look up site owner.
let cmd = `terminus site:team:list --format=json "${id}"`
let resp = await execPromise(cmd)
let team = JSON.parse(resp.stdout)
// Update global owners
Object.assign(owners, team)
return owners[owner].email !== undefined ? owners[owner].email : ''
}
}
/**
* Get custom domains for each site
* @param {Object} site
*/
exports.getDomains = async (site) => {
const id = site.id
const name = site.name
const siteId = `${id}.live`
console.log(`Searching for domains in ${name}...`)
const cmd = `terminus domain:list "${siteId}" --format json --filter type=custom`
const resp = await execPromise(cmd)
const domains = JSON.parse(resp.stdout)
site.custom_domains = Object.keys(domains).join(', ')
return site
}
/**
* Get appserver IP addresses based on site ID.
* @param {string} id
* @param {string} env
*/
exports.getContainerIps = async (id, env) => {
const uri = `appserver.${env}.${id}.drush.in`
let ips = await dig([uri])
.then((result) => {
let ipaddr = result.answer.map((i) => i.value)
return ipaddr
})
.catch((err) => {
console.log('Error:', err)
})
return ips
}
/**
* Rsync app server logs.
* @param {string} ip Appserver IP address
* @param {string} id Target site UUID
* @param {string} env Target site environment
*/
exports.getAppLogs = async function (ip, id, env) {
// Create directory, then rsync files
execSync(`mkdir -p logs/${id}/${ip}`)
execSync(`rsync -zabuP -e 'ssh -p 2222' ${env}.${id}@appserver.${env}.${id}.drush.in:logs/* logs/${id}/${ip}`)
}
/**
* Rsync db server logs.
* @param {string} ip Database server IP address
* @param {string} id Target site UUID
* @param {string} env Target site environment
*/
exports.getDbLogs = async function (ip, id, env) {
// Create directory, then rsync files
execSync(`mkdir -p logs/${id}/${ip}`)
execSync(`rsync -zabuP -e 'ssh -p 2222' ${env}.${id}@dbserver.${env}.${id}.drush.in:logs/* logs/${id}/${ip}`)
}
/**
* Merge common log files from different app containers.
* @param {string} id Site ID
* @param {array} ips Appserver IP addresses
*/
exports.mergeLogs = async function (id, ips) {
// Use for cleanup
const folders = ['nginx', 'php']
// Establish files
const files = {
'error.log': 'nginx/error.log',
'nginx-access.log': 'nginx/nginx-access.log',
'nginx-error.log': 'nginx/nginx-error.log',
'php-error.log': 'php/php-error.log',
'php-fpm-error.log': 'php/php-fpm-error.log',
'php-slow.log': 'php/php-slow.log',
'newrelic.log': 'php/newrelic.log',
'mysqld-slow-query.log': 'mysqld-slow-query.log',
'mysqld.log': 'mysqld.log',
}
// Base status
let status = false
// Loop through files
for (let dest in files) {
let source = files[dest]
let inputPaths = []
const outputPath = `logs/${id}/${dest}`
// Loop through each IP
for (let i = 0; i < ips.length; i++) {
const ip = ips[i]
let path = `logs/${id}/${ip}/${source}`
if (fs.existsSync(path)) {
inputPaths.push(path)
}
}
console.log(inputPaths)
status = await mergeFiles(inputPaths, outputPath)
console.log(`${outputPath}: ${status}`)
}
// Remove old log folders.
for (let f = 0; f < folders.length; f++) {
const folder = folders[f]
for (let i = 0; i < ips.length; i++) {
const ip = ips[i]
exec(`rm -rf logs/${id}/${ip}`)
}
}
}
/**
*
* @param {object} site The Site object
* @param {string} env Target environment
*/
exports.processLogs = async function (site, env) {
let ips = await exports.getContainerIps(site.id, env)
// Extract logs
ips.forEach((ip) => {
exports.getAppLogs(ip, site.id, env)
exports.getDbLogs(ip, site.id, env)
})
// Merge log files
await exports.mergeLogs(site.id, ips)
return site
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment