Skip to content

Instantly share code, notes, and snippets.

@jaidetree
Last active February 7, 2023 08:11
Show Gist options
  • Save jaidetree/e70a1b51b1fffe336d1ed7736afeeca4 to your computer and use it in GitHub Desktop.
#!/usr/bin/env node
const { Readable, Writable, PassThrough } = require('stream')
const fs = require('fs')
const path = require('path')
const chokidar = require('chokidar')
const { opendir } = require('fs/promises')
const stream = require('./lib/stream')
const flags = {
  // Watch mode is enabled by either the long or short CLI flag
  watch: ['--watch', '-w'].some(flag => process.argv.includes(flag)),
}
/**
 * Create a readable stream that emits files from a directory asynchronously.
 * It is not recursive.
 *
 * @param {string} directory - Directory path to read from
 * @returns {Readable} A readable stream of file objects with the following keys
 * {
 *   ext: string,
 *   filename: string,
 *   basename: string,
 *   path: string,
 * }
 */
function readdirStream(directory) {
  return stream.create((push, done) => {
    // opendir already returns a promise; chain on it directly rather than
    // re-wrapping it in Promise.resolve
    opendir(directory)
      // Asynchronously read values from the dir
      .then(async dir => {
        for await (const dirent of dir) {
          if (dirent.isFile()) {
            const filename = dirent.name
            const ext = path.extname(filename)
            push({
              ext: ext.slice(1), // extension without the leading dot
              filename,
              basename: path.basename(filename, ext),
              path: path.join(directory, filename),
            })
          }
        }
      })
      // Surface read failures instead of leaving an unhandled rejection
      // and a stream that never ends
      .catch(error => console.error(error))
      // Always signify this stream is done emitting values so downstream
      // consumers can finish even on error
      .finally(() => done())
  })
}
/**
 * Determine if file obj is within an accepted list of extensions
 *
 * @param {string[]} exts - List of exts like ["png", "svg", "tsx"]
 * @param {object} file - A file object with a path, basename, filename, and ext
 * @returns {boolean} True if file.ext is within allowed extensions
 */
function isImgFile(exts, file) {
  return exts.some(ext => ext === file.ext)
}
/**
 * Transforms a string like cloud_hosted into CloudHosted
 *
 * @param {string} basename - A basename string without extension "always_on"
 * @returns {string} PascalCased form of the input; empty string stays empty
 */
function pascalCase(basename) {
  return basename
    .split(/[-_\s]+/)
    // Leading/trailing/repeated separators yield empty segments; drop them
    // so word[0] can never be undefined (which would throw on toUpperCase)
    .filter(word => word.length > 0)
    .map(word => `${word[0].toUpperCase()}${word.slice(1)}`)
    .join('')
}
/**
 * Format an image file export to be written to the index.ts
 *
 * @param {object} file - Incoming file object with a basename, filename, path
 *                        and ext attrs
 * @returns {string} A line of code to append to the index.ts
 */
function formatImgExport(file) {
  const { ext, basename, filename } = file
  if (ext === 'tsx') {
    // Component modules are re-exported under their own basename
    return `export { default as ${basename} } from './${basename}'\n`
  }
  if (ext === 'png' || ext === 'jpg' || ext === 'svg') {
    // Raw images get an img-prefixed PascalCase binding
    return `export { default as img${pascalCase(basename)} } from './${filename}'\n`
  }
  // Anything else passes through unchanged
  return filename
}
/**
 * Returns a finite stream of img files or an infinite chokidar watcher stream
 *
 * @param {string} globStr - The glob string to watch
 * @returns {Readable} Readable stream of files
 */
function streamImgFiles(globStr) {
  return stream.create((push, done) => {
    // Start by watching directories
    const dirGlob = path.dirname(globStr)
    // Escape the dot so names like "index_tsx" are not accidentally ignored;
    // matches index.ts / index.tsx with an optional leading slash
    const ignorePattern = /\/?index\.tsx?$/
    let watcher = chokidar.watch(dirGlob, {
      persistent: false,
      ignored: ignorePattern,
    })
    // Capture directories on initialize
    const onAddDir = directory => push(directory)
    watcher.on('addDir', onAddDir)
    watcher.once('ready', () => {
      const onChange = filepath => push(path.dirname(filepath))
      // Stop listening for directory events and instead watch for updated
      // img files or new img files
      if (flags.watch) {
        watcher.unwatch(dirGlob)
        watcher.close()
        watcher = chokidar.watch(globStr, {
          ignoreInitial: true,
          ignored: ignorePattern,
        })
        watcher.on('add', onChange)
        watcher.on('unlink', onChange)
        return
      }
      // Otherwise end the stream so the program exits
      done()
    })
    // Dispose hook: close whichever watcher is currently active
    return () => {
      watcher.close()
    }
  })
}
/**
 * Write img file exports to the index file. Appending means this script does
 * not ever have to maintain an array of image files in memory
 *
 * Each incoming `file` object carries an `index` path (the index.ts to
 * create) and a `contents` readable stream of export-line strings.
 *
 * @returns {Writable} A writable stream that finishes when all incoming files
 *                     have been written to their target index.ts files
 */
function createIndexWriter() {
  return new Writable({
    objectMode: true,
    write(file, encoding, callback) {
      let isEmpty = true
      const writeStream = new PassThrough({ objectMode: true })
      // Avoids creating indexes for directories with no images:
      // stream.once fires only on the FIRST export line, so the index file on
      // disk is opened lazily, once we know there is something to write
      file.contents
        .pipe(stream.once(() => {
          isEmpty = false
          writeStream
            .pipe(fs.createWriteStream(file.index, {
              encoding: 'utf-8',
              flags: 'w+',
            }))
            // Only signal readiness for the next directory after the index
            // file has been fully flushed to disk
            .on('finish', () => {
              console.log('Created index file', file.index)
              callback()
            })
        }))
        .pipe(writeStream)
        // Call the callback for the next item if not going to build an index
        // for this directory, like if it has no image files
        .on('finish', () => {
          if (isEmpty)
            callback()
        })
    },
  })
}
/**
 * Create the chokidar stream to watch for svg files. Will fire immediately on
 * any new svgs it finds
 */
streamImgFiles('./app/{components,route-assets}/**/*.{svg,png,jpg}')
  // For each emitted directory, build a record with the index.ts path to
  // generate plus a lazy stream of export lines for its image/component files
  .pipe(stream.map(directory => ({
    directory,
    index: path.join(directory, 'index.ts'),
    contents: readdirStream(directory)
      .pipe(
        // Keep only image/component files, and never re-export the index itself
        stream.filter(file =>
          isImgFile(['svg', 'png', 'jpg', 'tsx'], file)
          && !file.path.includes('index.tsx')
        ),
      )
      .pipe(stream.map(formatImgExport)),
  })))
  // Skip the top-level components directory; no barrel index is generated there
  .pipe(stream.filter(file => !file.index.includes('components/index.ts')))
  .pipe(createIndexWriter())
  .on('finish', () => {
    console.log(`Generated indexes`)
  })
  .on('error', console.error)
const { Readable, Transform, PassThrough } = require('stream')
/**
* Creates a map transform stream to read values from a readable, transforms
* the value with a given mapFn, and then emits the returned value downstream.
*
* @param {(x: any) => any} mapFn - Transform value into anything
* @returns {Transform} A transform stream that can be read from again
*/
exports.map = function map(mapFn) {
return new Transform({
objectMode: true,
transform(data, encoding, callback) {
this.push(mapFn(data))
callback()
},
})
}
/**
* Creates a filter transform stream to read values from a readable, tests each
* value, and if it passes the predicate it will emit the value down stream
*
* @param {(x: any) => boolean} filterFn - Predicate to filter stream values
* @returns {Transform} A transform stream that can be read from again
*/
exports.filter = function filter(filterFn) {
return new Transform({
objectMode: true,
transform(data, encoding, callback) {
if (filterFn(data)) {
this.push(data)
}
callback()
},
})
}
/**
* Creates a more specialized transform stream that reads values from a readable
* then uses mapFn to transform the value into another stream and emits those
* values down stream
*
* @NOTE Typically flatMap streams will emit values based on the first substream
* to emit a value. This implementation waits until the returned readable
* finishes before moving onto the next value.
* @param {(x: any) => Readable} mapFn - Transform value into a readable stream
* @returns {Transform} A transform stream that can be read from again
*/
exports.flatMap = function flatMap(mapFn) {
return new Transform({
objectMode: true,
async transform(data, encoding, callback) {
const readable = mapFn(data)
if (!readable instanceof Readable) {
callback(
new Error(
`flatMap: Returned value was not a readable stream, got ${typeof readable}`,
),
)
return
}
for await (const chunk of readable) {
this.push(chunk)
}
callback()
},
})
}
/**
* Takes n number of values from the start of the stream. Closes after n items
* are pushed.
*
* @param {number} n - Number of items to emit down stream from first value
*/
exports.take = function take(n) {
return new Transform({
objectMode: true,
construct(callback) {
this._count = 0
callback()
},
transform(chunk, encoding, callback) {
if (this._count < n) {
this._count++
this.push(chunk)
}
if (this._count === n) {
this.push(null)
}
callback()
},
})
}
/**
* A stream that emits all values it receives within a given window. The window
* opens on the first item and then closes after the given ms. A new window
* will be created on next value
*
* @param {number} ms - Number of milliseconds to wait before emitting the
* buffer.
*/
exports.bufferWithTime = function bufferWithTime(ms) {
return new Transform({
objectMode: true,
construct(callback) {
this._buffer = []
this._timeout = null
callback()
},
async transform(data, encoding, callback) {
if (!this._timeout) {
this._timeout = setTimeout(() => {
this.push(this._buffer)
this._timeout = null
}, ms)
}
this._buffer.push(data)
callback()
},
flush(callback) {
this.push(this._buffer)
this._buffer = []
if (this._timeout) clearTimeout(this._timeout)
callback()
},
})
}
/**
* Fire a side-effect once per stream. Only fires on first upstream data.
* For example useful for only creating index.ts only for directories with at
* least one image file in them.
*
* @param {(onceFn: (data: unknown) => void)} onceFn - Effect function to call
* on first upstream data
* @returns {Transform} A readable and writable stream of the original data
*/
exports.once = function once(onceFn) {
return new Transform({
objectMode: true,
construct(callback) {
this._dispatched = false
callback()
},
transform(data, encoding, callback) {
if (!this._dispatched) {
this._dispatched = true
onceFn(data)
}
callback(null, data)
},
})
}
/**
 * Create a stream with a simplified constructor function that can be used to
 * bind any async or sync resource.
 * @param {(
 *   push: (value: any) => void,
 *   done: () => void
 * ) =>
 *   () => void | undefined
 * } streamFn - A constructor function for initializing a resource, emitting
 *              values downstream, then optionally returning a function to
 *              dispose that resource
 * @returns {Readable} A readable stream that emits values anytime the streamFn
 *                     pushes values to it.
 */
exports.create = function create(streamFn) {
  let unsubscribe = null
  // Lazy subscription: streamFn runs on the FIRST read only; after that,
  // subscribe is swapped for a no-op so repeated _read calls cannot
  // re-initialize the resource
  let subscribe = () => {
    const push = data => stream.write(data)
    const done = () => {
      stream.end()
      // Dispose the underlying resource (e.g. close a watcher) exactly once
      if (unsubscribe) {
        unsubscribe()
        unsubscribe = null
      }
    }
    unsubscribe = streamFn(push, done)
    subscribe = () => {}
  }
  const stream = new PassThrough({
    objectMode: true,
    // Defer initialization until a consumer actually starts reading
    read(_size) {
      subscribe()
    },
  })
  return stream
}
#!/usr/bin/env node
const { Readable, Writable, PassThrough, Stream } = require('stream')
const fs = require('fs')
const path = require('path')
const chokidar = require('chokidar')
const stream = require('./lib/stream')
const svgr = require('@svgr/core')
const svgo = require('@svgr/plugin-svgo')
const jsx = require('@svgr/plugin-jsx')
const prettier = require('@svgr/plugin-prettier')
const flags = {
  // Watch mode is enabled by either the long or short CLI flag
  watch: ['--watch', '-w'].some(flag => process.argv.includes(flag)),
}
/**
 * Docs found at
 * https://react-svgr.com/docs/options/#options
 *
 * @type {import('@svgr/core').Config}
 */
const svgrConfig = {
  // Start from the svgo plugin defaults, then override selectively below
  ...svgo.DEFAULT_CONFIG,
  dimensions: false,
  expandProps: 'end',
  exportType: 'default',
  icon: false,
  ignoreExisting: false,
  index: false,
  jsx: true,
  memo: false,
  namedExport: 'ReactComponent',
  native: false,
  outDir: 'app',
  // Order here matters
  // https://github.com/gregberge/svgr/blob/161d1b151534bb3234b4c28fa4a383c120228dfb/packages/cli/src/util.ts#L38
  plugins: [svgo, jsx, prettier],
  prettier: true,
  ref: true,
  // Rewrite these hard-coded blues to currentColor so the generated icons
  // inherit their color from the surrounding text/CSS (presumably brand
  // colors — confirm against the design system)
  replaceAttrValues: {
    '#008cd0': 'currentColor',
    '#008cd6': 'currentColor',
    '#008cd7': 'currentColor',
  },
  runtimeConfig: true,
  svgo: true,
  titleProp: false,
  typescript: true,
}
/**
 * Stream svg file paths from a chokidar watcher. In watch mode the stream is
 * infinite; otherwise it ends after the initial scan. When a source svg is
 * deleted, the generated sibling <PascalName>.tsx is removed as well.
 *
 * @param {string} globStr - The glob string to watch
 * @returns {Readable} Readable stream of matching file paths
 */
function streamImgFiles(globStr) {
  return stream.create((push, done) => {
    let watcher = chokidar.watch(globStr, {
      // Keep the process alive only when running with --watch
      persistent: flags.watch,
    })
    const onChange = filepath => push(filepath)
    if (!flags.watch) {
      // One-shot mode: end the stream once the initial scan has been emitted
      watcher.once('ready', () => {
        done()
      })
    }
    watcher.on('add', onChange)
    watcher.on('change', onChange)
    // Unlinking is simple enough to handle here so may as well
    watcher.on('unlink', filepath => {
      const extname = path.extname(filepath)
      const basename = path.basename(filepath, extname)
      // Generated component lives next to the svg as <PascalName>.tsx
      const tsxname = pascalCase(basename)
      const tsxpath = path.join(path.dirname(filepath), tsxname + '.tsx')
      fs.unlink(tsxpath, () => {
        console.log('Deleted ', tsxpath)
      })
    })
    // Dispose hook used by stream.create when the stream is done
    return () => {
      watcher.close()
    }
  })
}
/**
 * Transforms a string like cloud_hosted into CloudHosted
 *
 * @param {string} basename - A basename string without extension "always_on"
 * @returns {string} PascalCased form of the input; empty string stays empty
 */
function pascalCase(basename) {
  return basename
    .split(/[-_\s]+/)
    // Leading/trailing/repeated separators yield empty segments; drop them
    // so word[0] can never be undefined (which would throw on toUpperCase)
    .filter(word => word.length > 0)
    .map(word => `${word[0].toUpperCase()}${word.slice(1)}`)
    .join('')
}
/**
 * Wrap a promise-returning thunk in a single-value readable stream. Emits the
 * resolved value and ends; on rejection it logs the error and simply ends.
 *
 * @param {() => Promise<any>} createPromise - Thunk producing the promise
 * @returns {Readable} Stream that emits at most one value
 */
function fromAsync(createPromise) {
  return stream.create((push, done) => {
    createPromise()
      .then(value => {
        push(value)
      })
      .catch(error => {
        // Best-effort: log and let the stream end without a value
        console.error(error)
      })
      // Always close the stream, whether the promise settled or push threw
      .finally(() => done())
  })
}
/**
 * Convert raw SVG source into React component source via svgr.
 *
 * @param {object} file - File record with a path and componentName
 * @param {string} src - Raw SVG source text
 * @returns {Readable} Stream emitting the generated component source
 */
function convertFile(file, src) {
  const state = {
    filePath: file.path,
    // svgr components are prefixed to avoid clashing with the export name
    componentName: `Svg${file.componentName}`,
    caller: {},
  }
  return fromAsync(() => svgr.transform(src, svgrConfig, state))
}
/**
 * Create the chokidar stream to watch for svg files. Will fire immediately on
 * any new svgs it finds
 */
streamImgFiles('./app/{components,route-assets}/**/*.svg')
  // Build a file record: component name, source path, destination .tsx path,
  // and a readable stream of the raw svg source
  .pipe(stream.map(filepath => {
    const basename = path.basename(filepath, path.extname(filepath))
    const componentName = pascalCase(basename)
    return {
      componentName,
      path: filepath,
      destpath: path.join(path.dirname(filepath), componentName + '.tsx'),
      contents: fs.createReadStream(filepath, { encoding: 'utf-8' }),
    }
  }))
  // Replace the raw svg contents stream with the svgr-converted source
  .pipe(stream.map(file => ({
    ...file,
    contents: file.contents
      .pipe(stream.flatMap(src => convertFile(file, src))),
  })))
  .pipe(
    new Writable({
      objectMode: true,
      write(file, encoding, callback) {
        const writer = fs.createWriteStream(file.destpath)
        // Only move on to the next file once this one is flushed to disk
        writer.on('finish', () => {
          process.stdout.write(
            `Converted ${file.path} -> ${file.destpath}\n`,
          )
          callback()
        })
        file.contents.pipe(writer)
      },
    }),
  )
  .on('finish', () => {
    console.log(`Generated react svgs`)
  })
  .on('error', console.error)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment