@LynMoe
Last active October 3, 2020 07:19
Hexo Compressor
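// Post-build asset pipeline for a Hexo site's generated public/ directory:
// converts images to content-hashed WebP/PNG pairs, minifies CSS, JS and HTML,
// rewrites asset references inside the HTML, and writes the hash manifests used
// by a service worker. Set MIN_TYPE=image to run only the image pass.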
const _ = require('lodash')
const fs = require('fs')
const sha1 = require('sha1')
const path = require('path')
const glob = require("glob")
const sharp = require('sharp')
const fetch = require('node-fetch')
const rimraf = require("rimraf")
const imagemin = require('imagemin')
const domParser = require('dom-parser')
const parser = new domParser()
const hashFiles = require('hash-files')
const imageminPngquant = require('imagemin-pngquant')
const csso = require('@node-minify/csso')
const minify = require('@node-minify/core')
const PromiseQueue = require('promise-queue-plus')
// const htmlMinifier = require('@node-minify/html-minifier')
const gcc = require('@node-minify/google-closure-compiler')
const terser = require("terser")
const htmlMinifier = require('html-minifier').minify
// Serial pipeline: each queue.add() stage below runs after the previous one resolves
let queue = PromiseQueue(1, {
  autoRun: true
})
const __root = path.normalize(__dirname + '/public')
// Hashed assets are written to /dist/<first two chars of hash>/<hash>.<ext>
const genFilenameWithoutExtName = (hash, fullPath = true) => {
  return ((fullPath) ? __root : '') + '/dist/' + hash.substr(0, 2) + '/' + hash
}
// Original path (relative to public/) -> content hash of that file
const hashMap = {}
const dbData = {
  styleHash: '',
  moduleHash: '',
  dbHash: '',
  db: {}
}
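
// Stage 1: compress every image in public/ into a hashed .webp/.png pair under dist/,
// and minify CSS and JS into content-hashed files (CSS/JS are skipped when MIN_TYPE=image)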
queue.add((resolve) => {
  glob(__root + '/**/*.*', async (error, matches) => {
    // Compress up to 8 files concurrently; resolve this stage once the queue drains
    let minQueue = PromiseQueue(8, {
      autoRun: true,
      queueEnd: function () {
        ;['/images'].forEach((d) => rimraf.sync(__root + d))
        console.log(hashMap)
        if (process.env.MIN_TYPE === 'image') {
          // Image-only run: keep only the hashed dist/ output
          fs.renameSync(__root + '/dist', __root + '/../tmpDist')
          rimraf.sync(__root)
          fs.mkdirSync(__root)
          fs.renameSync(__root + '/../tmpDist', __root + '/dist')
        }
        resolve()
      }
    })
    for (let i in matches) {
      const filename = matches[i]
      const oriFilename = filename.replace(__root, '')
      // Skip files already written into the hashed dist/ layout
      if (oriFilename.startsWith('/dist') && oriFilename.length > 40) continue
      const fileHash = hashFiles.sync({
        files: [ filename ]
      })
      const newFilename = genFilenameWithoutExtName(fileHash)
      const extName = path.extname(filename)
      switch (extName) {
        case '.png':
        case '.jpg':
        case '.jpeg':
        case '.gif':
        case '.tiff':
          minQueue.add(async (r) => {
            if (fs.existsSync(newFilename + '.png')) {
              // Already compressed on a previous run
              hashMap[oriFilename] = fileHash
              r()
              return
            }
            console.log('Processing file: ' + path.basename(filename) + ` (Hash: ${fileHash})`)
            fs.mkdirSync(path.dirname(newFilename), {
              recursive: true
            })
            // WebP variant, capped at 1920px on either edge
            await sharp(filename).resize({
              width: 1920,
              height: 1920,
              fit: 'inside',
              withoutEnlargement: true,
            }).webp({
              quality: 40,
              alphaQuality: 60,
              reductionEffort: 6,
            }).toFile(newFilename + '.webp')
            // PNG fallback, squeezed further with pngquant
            let buffer = await sharp(filename).resize({
              width: 1920,
              height: 1920,
              fit: 'inside',
              withoutEnlargement: true,
            }).png({
              quality: 30,
              progressive: true,
            }).toBuffer()
            buffer = await imagemin.buffer(buffer, {
              plugins: [
                imageminPngquant({
                  quality: [0.4, 0.6]
                }),
              ],
            })
            fs.writeFileSync(newFilename + '.png', buffer)
            hashMap[oriFilename] = fileHash
            r()
          })
          break
        // case '.html':
        //   minQueue.add((r) => {
        //     minify({
        //       compressor: htmlMinifier,
        //       input: filename,
        //       output: filename,
        //       replaceInPlace: true,
        //       options: {
        //         minifyCSS: true,
        //         minifyJS: true,
        //         removeComments: true,
        //         removeTagWhitespace: false,
        //         collapseWhitespace: true,
        //         conservativeCollapse: true,
        //       },
        //       callback: (err, min) => {
        //         if (err) console.error(err)
        //         r()
        //       },
        //     })
        //   })
        //   break
        case '.css':
          if (process.env.MIN_TYPE === 'image') continue
          minQueue.add((r) => {
            console.log('Processing file: ' + path.basename(filename) + ` (Hash: ${fileHash})`)
            // Record the build.css hash for the service-worker manifest (sw/hash.json)
            if (filename.endsWith('dist/build.css')) dbData.styleHash = fileHash
            fs.mkdirSync(path.dirname(newFilename), {
              recursive: true
            })
            minify({
              compressor: csso,
              input: filename,
              output: genFilenameWithoutExtName(fileHash) + '.css',
              replaceInPlace: true,
              callback: (err, min) => {
                if (err) console.error(err)
                r()
              },
            })
            hashMap[oriFilename] = fileHash
          })
          break
        case '.js':
          if (process.env.MIN_TYPE === 'image') continue
          minQueue.add(async (r) => {
            console.log('Processing file: ' + path.basename(filename) + ` (Hash: ${fileHash})`)
            // Record the build.js hash for the service-worker manifest (sw/hash.json)
            if (filename.endsWith('dist/build.js')) dbData.moduleHash = fileHash
            fs.mkdirSync(path.dirname(newFilename), {
              recursive: true
            })
            const filecontent = fs.readFileSync(filename).toString()
            // terser.minify() returns a Promise from v5 on; awaiting also works with v4
            const result = await terser.minify(filecontent)
            // sw.js keeps its original name (and therefore its scope); other scripts get hashed paths
            fs.writeFileSync((path.basename(filename) === 'sw.js') ? __root + '/sw.js' : genFilenameWithoutExtName(fileHash) + '.js', result.code)
            hashMap[oriFilename] = fileHash
            r()
            // minify({
            //   compressor: gcc,
            //   input: filename,
            //   output: filename,
            //   replaceInPlace: true,
            //   callback: (err, min) => {
            //     if (err) console.error(err)
            //     r()
            //   },
            // })
          })
          break
        default:
          // fs.renameSync(filename, newFilename + extName)
          break
      }
    }
  })
})
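
// Stage 2: replace the <<!sw.js-placeholder!>> token in the hashed custom.js with the hashed sw.js path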
queue.add((resolve) => {
  // Image-only run: return early (note: resolve() is not called here, so the later stages likely never run)
  if (process.env.MIN_TYPE === 'image') return
  let customName, customHash, swHash
  for (const filename in hashMap) {
    if (filename.endsWith('dist/custom.js')) {
      customName = filename
      customHash = hashMap[filename]
    } else if (filename.endsWith('sw.js')) {
      swHash = hashMap[filename]
    }
  }
  customName = genFilenameWithoutExtName(customHash) + '.js'
  console.log(customName, customHash, swHash)
  const content = fs.readFileSync(customName).toString()
  fs.writeFileSync(customName, content.split('<<!sw.js-placeholder!>>').join(genFilenameWithoutExtName(swHash, false) + '.js'))
  resolve()
})
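
// Stage 3: rewrite <img src>, <script src> and stylesheet <link href> references in every generated HTML page to the hashed asset paths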
queue.add((resolve) => {
  if (process.env.MIN_TYPE === 'image') return
  glob(__root + '/**/*.html', async (error, matches) => {
    for (let i in matches) {
      let filename = matches[i]
      let html = fs.readFileSync(filename).toString()
      let dom = parser.parseFromString(html)
      // <img> sources under images/ -> hashed .webp paths
      let imgs = []
      let images = dom.getElementsByTagName('img')
      for (let k in images) {
        let v = images[k]
        let src = ''
        for (let a in v.attributes) {
          let item = v.attributes[a]
          if (item.name === 'src') src = item.value
        }
        if (src) {
          let s = decodeURIComponent(`${src}`)
          if (s.startsWith('/../images/')) s = s.replace('/../images/', '/images/')
          else if (s.startsWith('../images/')) s = s.replace('../images/', '/images/')
          else continue
          imgs.push(s)
        }
      }
      _.uniq(imgs).forEach((item) => {
        html = html.split(item).join(genFilenameWithoutExtName(hashMap[item], false) + '.webp')
      })
      // <script src> references (query strings stripped) -> hashed .js paths
      let ss = []
      let scripts = dom.getElementsByTagName('script')
      for (let k in scripts) {
        let v = scripts[k]
        let src = ''
        for (let a in v.attributes) {
          let item = v.attributes[a]
          if (item.name === 'src') src = item.value
        }
        if (src) {
          let s = decodeURIComponent(`${src}`)
          s = s.replace('?' + s.split('?')[s.split('?').length - 1], '')
          ss.push(s)
        }
      }
      _.uniq(ss).forEach((item) => {
        console.log(item)
        html = html.split(item).join(genFilenameWithoutExtName(hashMap[item], false) + '.js')
      })
      // Stylesheet <link href> references -> hashed .css paths
      let css = []
      let csss = dom.getElementsByTagName('link')
      for (let k in csss) {
        let v = csss[k]
        let src = ''
        for (let a in v.attributes) {
          let item = v.attributes[a]
          if (item.name === 'href') src = item.value
        }
        if (src) {
          let s = decodeURIComponent(`${src}`)
          s = s.replace('?' + s.split('?')[s.split('?').length - 1], '')
          if (!s.endsWith('css')) continue
          css.push(s)
        }
      }
      _.uniq(css).forEach((item) => {
        html = html.split(item).join(genFilenameWithoutExtName(hashMap[item], false) + '.css')
      })
      fs.writeFileSync(filename, html)
      dbData.db[filename] = html
    }
    resolve()
  })
})
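
// Stage 4: minify the rewritten HTML in place, keep a copy in dbData.db for the service-worker snapshot, and remove the original images/ directory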
queue.add((resolve) => {
  if (process.env.MIN_TYPE === 'image') return
  glob(__root + '/**/*.*', async (error, matches) => {
    let minQueue = PromiseQueue(1, {
      autoRun: true
    })
    for (let i in matches) {
      const filename = matches[i]
      const oriFilename = filename.replace(__root, '')
      if (oriFilename.startsWith('/dist') && oriFilename.length > 40) continue
      const fileHash = hashFiles.sync({
        files: [ filename ]
      })
      const newFilename = genFilenameWithoutExtName(fileHash)
      const extName = path.extname(filename)
      switch (extName) {
        case '.html': {
          console.log('Processing file: ' + path.basename(filename) + ` (Hash: ${fileHash})`)
          const content = fs.readFileSync(filename).toString()
          const result = htmlMinifier(content, {
            minifyCSS: true,
            minifyJS: true,
            conservativeCollapse: false,
            collapseWhitespace: true,
            removeTagWhitespace: true,
            removeComments: true,
          })
          fs.writeFileSync(filename, result)
          // Keep the minified copy for the service-worker snapshot
          dbData.db[filename] = result
          break
        }
        default:
          // fs.renameSync(filename, newFilename + extName)
          break
      }
    }
    // minQueue has no pending work at this point, so this await completes immediately
    await (new Promise((r) => {
      minQueue.add(() => {
        r()
      })
    }))
    ;['/images/'].forEach((d) => rimraf.sync(__root + d))
    console.log(hashMap)
    resolve()
  })
})
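
// Stage 5: remove the now-unreferenced source asset directories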
queue.add((resolve) => {
  ;['/javascripts', '/modules', '/stylesheets'].forEach((d) => rimraf.sync(__root + d))
  resolve()
})
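
// Stage 6: write fileHashes.json, delete files whose hash matches the currently deployed manifest
// (presumably so only changed files need to be uploaded), and emit the service-worker data/hash files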
queue.add((resolve) => {
  const hashes = {}
  glob(__root + '/**/*.*', async (error, matches) => {
    for (let i in matches) {
      const oriFilename = matches[i]
      const filename = oriFilename.replace(__root, '')
      const fileHash = hashFiles.sync({
        files: [ oriFilename ]
      })
      hashes[filename] = fileHash
    }
    fs.writeFileSync(__root + '/fileHashes.json', JSON.stringify(hashes))
    // Compare against the manifest of the deployed site and drop files whose hash is unchanged
    const data = await fetch('https://www.xiaolin.in/fileHashes.json')
    if (parseInt(data.status) === 200) {
      const fileList = await data.json()
      for (const i in hashes) {
        console.log(i, hashes[i], fileList[i])
        if (hashes[i] === fileList[i]) {
          console.log('remove', i)
          fs.unlinkSync(__root + i)
        }
      }
    }
    {
      // Write the HTML snapshot and the hash manifest consumed by the service worker
      if (!fs.existsSync(__root + '/sw')) fs.mkdirSync(__root + '/sw')
      if (!fs.existsSync(__root + '/sw/data')) fs.mkdirSync(__root + '/sw/data')
      const db = JSON.stringify(dbData.db).split(__root).join('')
      const hash = sha1(db)
      fs.writeFileSync(__root + `/sw/data/${hash.substring(0, 6)}.json`, db)
      delete dbData.db
      dbData.dbHash = hash
      fs.writeFileSync(__root + '/sw/hash.json', JSON.stringify(dbData))
    }
    resolve()
  })
})