Last active
October 14, 2015 17:29
-
-
Save nojvek/27466ea46fc3fa4f4034 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Module dependencies and shared state for the directory-hashing script.
fs      = require 'fs'
XXHash  = require 'xxhash'
path    = require 'path'
#dive = require 'dive'

HashStream  = XXHash.Stream
c           = console
startTime   = Date.now()          # wall-clock start, used for elapsed logging
mapFileName = 'pbix.json'         # persisted snapshot of directory stats
rootDir     = '../pbix'           # directory tree being watched/hashed
# Recursively xxHash every file under rootDir and invoke callback(fileMap)
# exactly once, where fileMap maps root-relative paths to 64-bit hex hashes.
# NOTE(review): relies on the `dive` module whose require is commented out
# at the top of the file — it must be restored for this function to run.
computeDirXXHashes = (rootDir, callback) ->
  fileMap = {}
  fileCount = 0
  hashCount = 0
  walkDone = false                # true once dive has enumerated all files
  rootDir = fs.realpathSync(rootDir)

  maybeFinish = ->
    # Finish only when enumeration is complete AND every hash has landed.
    # The original checked fileCount == hashCount alone, which could fire
    # early (or more than once) whenever hashing kept pace with the walk.
    if walkDone and fileCount == hashCount
      callback(fileMap)
    return

  # Renamed the callback param from `path` — it shadowed the path module.
  onFile = (err, filePath) ->
    if err then return
    fileCount += 1
    hasher = new HashStream(0xDEAD, 64, 'hex')
    fs.createReadStream(filePath).pipe(hasher).on 'finish', ->
      hash = hasher.read()
      relPath = filePath.substr(rootDir.length)
      fileMap[relPath] = hash
      hashCount += 1
      maybeFinish()

  onComplete = ->
    walkDone = true
    elapsed = Date.now() - startTime
    c.log 'elapsed', elapsed, "fileCount", fileCount
    maybeFinish()

  dive rootDir, onFile, onComplete
# Stream a single file through a 64-bit xxHash (seed 0xDEAD) and deliver
# the hex digest to callback(hash) when the stream finishes.
xxFileHash = (path, callback) ->
  hashStream = new HashStream(0xDEAD, 64, 'hex')
  source = fs.createReadStream(path)
  source.pipe(hashStream).on 'finish', ->
    digest = hashStream.read()
    callback(digest)
# Shallow equality for plain objects: identical reference, or the same key
# set with strictly-equal values.
isEqual = (oldObj, newObj) ->
  if oldObj == newObj then return true
  # The original returned true for isEqual(null, anything) because the
  # for-loop over null simply never iterated; guard explicitly.
  if not oldObj? or not newObj? then return false
  for key of oldObj
    if oldObj[key] != newObj[key] then return false
  # The original only walked oldObj's keys, so a key present only in newObj
  # went undetected; check the reverse direction too.
  for key of newObj
    if not oldObj.hasOwnProperty(key) then return false
  return true
# Diff two {path: statObj} maps and report which paths were added, changed,
# or removed.  Returns {added, changed, removed} (arrays of path strings).
# The original destroyed both inputs with `delete` — including deleting from
# newMap while for-of iterating it — to derive `removed`; this version leaves
# the caller's maps untouched and computes `removed` by membership test.
getMapDiff = (oldMap, newMap) ->
  changed = []
  added = []
  removed = []
  for filePath of newMap
    if oldMap.hasOwnProperty(filePath)
      if not isEqual(oldMap[filePath], newMap[filePath])
        changed.push(filePath)
    else
      added.push(filePath)
  for filePath of oldMap
    if not newMap.hasOwnProperty(filePath)
      removed.push(filePath)
  return {added: added, changed: changed, removed: removed}
# Walk rootDir and build {relativePath: {lastmod, size}} for its entries.
# opts:
#   recursive   - descend into subdirectories (default true)
#   files       - include files in the map (default true)
#   directories - include directories in the map (default false)
# Hidden directories (name starting with '.') are skipped entirely.
# Invokes callback(statsMap) exactly once, after the whole tree is visited.
dirStats = (rootDir, opts = {}, callback) ->
  recursive = if opts.recursive != undefined then opts.recursive else true
  # BUG FIX: the original used `opts.files || true`, which is ALWAYS true,
  # so `files: false` was silently ignored.  Handle it like `recursive`.
  includeFiles = if opts.files != undefined then opts.files else true
  includeDirs = if opts.directories != undefined then opts.directories else false
  statsMap = {}
  realRootDir = fs.realpathSync(rootDir)
  pending = 0          # outstanding async fs operations
  totalSize = 0
  startTime = Date.now()   # NOTE: reassigns the module-level startTime, as before

  checkDone = ->
    # Fire the callback only when no async work remains.  The original only
    # tested this inside the lstat callback, so an empty root directory (or
    # a readdir/lstat error on the last pending op) never invoked callback.
    if pending == 0
      elapsed = Date.now() - startTime
      fileTotal = Object.keys(statsMap).length
      console.log "elapsed", elapsed, "files", fileTotal, "size", totalSize
      if callback then callback(statsMap)
    return

  walk = (dir) ->
    pending++
    fs.readdir dir, (err, list) ->
      pending--
      if err then return checkDone()
      list.forEach (file) ->
        fullPath = path.resolve(dir, file)
        pending++
        fs.lstat fullPath, (err, stat) ->
          pending--
          if err then return checkDone()
          relativePath = fullPath.substr(realRootDir.length)
          statObj =
            lastmod: stat.mtime.getTime()
            size: stat.size
          # Size is accumulated before any filtering, matching the original.
          totalSize += stat.size
          if stat.isDirectory()
            # Skip hidden directories without recursing or recording them.
            if file[0] == '.' then return checkDone()
            if includeDirs then statsMap[relativePath] = statObj
            # walk() bumps `pending` synchronously, so the checkDone below
            # cannot fire prematurely while the subtree is still queued.
            if recursive then walk(fullPath)
          else if stat.isFile()
            if includeFiles then statsMap[relativePath] = statObj
          checkDone()
          return
      checkDone()
      return
    return

  walk(rootDir)
  return
# Re-declare the snapshot file and root directory (these duplicate the
# values already set at the top of the file; kept for identical behaviour).
mapFileName = 'pbix.json'
rootDir = '../pbix'

# Earlier xxHash-only driver, retained here commented out for reference:
# computeDirXXHashes '../pbix', (newMap) ->
#   oldMap = JSON.parse(fs.readFileSync(mapFileName, 'utf-8'))
#   fs.writeFile mapFileName, JSON.stringify(newMap, null, '\t'), ->
#     c.log "written elapsed", Date.now() - startTime
#   diffs = getMapDiff(oldMap, newMap)
#   c.log diffs
# Hash every path in filePaths (resolved against rootDir) into the
# module-level hashCache, then invoke callback() once everything is hashed.
computeHashes = (filePaths, rootDir, callback) ->
  todo = 0
  for filePath in filePaths
    todo++
    # BUG FIX: `do (filePath)` captures a fresh binding per iteration.
    # Without it, every async xxFileHash callback saw the loop's FINAL
    # value, so all hashes were written under one cache key.
    do (filePath) ->
      xxFileHash rootDir + filePath, (hash) ->
        todo--
        hashCache[filePath] = hash
        if todo == 0 then callback()
  # An empty filePaths list starts no async work; complete immediately.
  if todo == 0 then callback()
  return
# Driver: load the persisted hash cache, snapshot the directory stats, diff
# against the previous snapshot, and re-hash only the files that changed.
hashCache = {}
hashCacheFile = 'hashCache.json'
# BUG FIX: fs.exists is async (and deprecated) — called without a callback
# it returns undefined, so this condition was ALWAYS falsy and the cache
# never loaded.  Use the synchronous variant, and the named constant.
if fs.existsSync(hashCacheFile)
  hashCache = JSON.parse(fs.readFileSync(hashCacheFile, 'utf-8'))

dirStats rootDir, recursive: true, (newStatsMap) ->
  # Tolerate a missing snapshot on the first run instead of throwing.
  oldStatsMap =
    if fs.existsSync(mapFileName)
      JSON.parse(fs.readFileSync(mapFileName, 'utf-8'))
    else
      {}
  # fs.writeFile requires a callback on modern Node; log failures.
  fs.writeFile mapFileName, JSON.stringify(newStatsMap, null, '\t'), (err) ->
    if err then c.log "failed to write", mapFileName, err
  diffs = getMapDiff(oldStatsMap, newStatsMap)
  computeHashes diffs.changed, rootDir, ->
    fs.writeFile hashCacheFile, JSON.stringify(hashCache, null, '\t'), (err) ->
      if err then c.log "failed to write", hashCacheFile, err
    c.log diffs

# watcher = fs.watch rootDir, recursive:true, (event, filename) ->
#   c.log "watch", event, filename
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment