@rvagg
Created October 24, 2012 02:45
npm CouchDB registry/_design/app
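This is the full `_design/app` document from the public npm CouchDB registry as of October 2012: map/reduce views (updated, modified, byKeyword, dependedUpon, npmTop, and so on), the `package` show and update functions that serve reads and publishes, `_list` functions that render view output, the `rewrites` table that maps registry URLs onto those handlers, the `validate_doc_update` gate, and a set of CommonJS helper modules (`semver`, `valid`, `deep`, `Date`, `monkeypatch`) stored as string properties.

As a rough illustration of how the rewrites are exercised, the `/:pkg` rule routes a GET to `_show/package/:pkg`, which strips private fields, cleans up legacy version entries, and points each `dist.tarball` URL back at the registry host that served the request. A minimal sketch, assuming a local replica of the registry in a CouchDB database named `registry` on `localhost:5984` (host, port, and database name are placeholders, not part of this gist):

// fetch-package.js -- read package metadata through the _rewrite handler
var http = require('http')

var pkg = 'npm' // any package name
var path = '/registry/_design/app/_rewrite/' + encodeURIComponent(pkg)

http.get({ host: 'localhost', port: 5984, path: path }, function (res) {
  var body = ''
  res.setEncoding('utf8')
  res.on('data', function (chunk) { body += chunk })
  res.on('end', function () {
    var doc = JSON.parse(body)
    // the show function has already removed underscore-prefixed fields and
    // rewritten dist.tarball URLs to point at this registry
    console.log(doc['dist-tags'], Object.keys(doc.versions || {}))
  })
})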
{
"_id": "_design/app",
"_rev": "607-6efd9a48552b62b0e2f52b1851589ea2",
"views": {
"updated": {
"map": "function (doc) {\n var l = doc[\"dist-tags\"].latest\n , t = doc.time && doc.time[l]\n if (t) emit(t, 1)\n}"
},
"listAll": {
"map": "function (doc) { return emit(doc._id, doc) }"
},
"modified": {
"map": "function (doc) {\n if (!doc.versions || doc.deprecated) return\n var latest = doc[\"dist-tags\"].latest\n if (!doc.versions[latest]) return\n var time = doc.time && doc.time[latest] || 0\n var t = new Date(time)\n emit(t.getTime(), doc)\n}"
},
"byEngine": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"] || doc.deprecated) return\n var v = doc[\"dist-tags\"].latest\n var d = doc.versions[v]\n if (d && d.engines) emit(doc._id, [d.engines, doc.maintainers])\n }"
},
"countVersions": {
"map": "function (doc) {\n if (!doc || !doc.name || doc.deprecated) return\n var i = 0\n if (!doc.versions) return emit([i, doc._id], 1)\n for (var v in doc.versions) i++\n emit([i, doc._id], 1)\n}",
"reduce": "_sum"
},
"byKeyword": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc['dist-tags'] || doc.deprecated) return\n var v = doc.versions[doc['dist-tags'].latest]\n if (!v || !v.keywords || !Array.isArray(v.keywords)) return\n v.keywords.forEach(function (kw) {\n emit([kw.toLowerCase(), doc.name, doc.description], 1)\n })\n }",
"reduce": "_sum"
},
"byField": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n var v = doc[\"dist-tags\"].latest\n //Object.keys(doc.versions).forEach(function (v) {\n var d = doc.versions[v]\n if (!d) return\n //emit(d.name + \"@\" + d.version, d.dist.bin || {})\n var out = {}\n for (var i in d) {\n out[i] = d[i] //true\n if (d[i] && typeof d[i] === \"object\" &&\n (i === \"scripts\" || i === \"directories\")) {\n for (var j in d[i]) out[i + \".\" + j] = d[i][j]\n }\n }\n out.maintainers = doc.maintainers\n emit(doc._id, out)\n //})\n }"
},
"needBuild": {
"map": "function (doc) {\n\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n var v = doc[\"dist-tags\"].latest\n //Object.keys(doc.versions).forEach(function (v) {\n var d = doc.versions[v]\n if (!d) return\n if (!d.scripts) return\n var inst = d.scripts.install\n || d.scripts.preinstall\n || d.scripts.postinstall\n if (!inst) return\n //emit(d.name + \"@\" + d.version, d.dist.bin || {})\n emit(d._id, d.dist.bin || {})\n //})\n }"
},
"scripts": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n var v = doc[\"dist-tags\"].latest\n v = doc.versions[v]\n if (!v || !v.scripts) return\n var out = {}\n var any = false\n for (var i in v.scripts) {\n out[i] = v.scripts[i]\n any = true\n }\n if (!any) return\n out.maintainers = doc.maintainers\n emit(doc._id, out)\n }"
},
"nodeWafInstall": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n var v = doc[\"dist-tags\"].latest\n if (!doc.versions[v]) return\n if (!doc.versions[v].scripts) return\n for (var i in doc.versions[v].scripts) {\n if (doc.versions[v].scripts[i].indexOf(\"node-waf\") !== -1 ||\n doc.versions[v].scripts[i].indexOf(\"make\") !== -1) {\n emit(doc._id, doc.versions[v]._id)\n return\n }\n }\n }"
},
"badBins": {
"map": "function (doc) {\n if (!doc || !doc.versions || !doc[\"dist-tags\"]) return\n var v = doc[\"dist-tags\"].latest\n if (!doc.versions[v]) return\n v = doc.versions[v]\n var b = v.bin\n , d = v.directories && v.directories.bin\n if (!b && !d) return\n if (b && (typeof b === \"string\" || Object.keys(b).length === 1)) {\n // it's ok.\n return\n }\n emit(doc._id, {binHash:b, binDir:d})\n }"
},
"orphanAttachments": {
"map": "function (doc) {\n if (!doc || !doc._attachments) return\n var orphans = []\n , size = 0\n for (var i in doc._attachments) {\n var n = i.substr(doc._id.length + 1).replace(/\\.tgz$/, \"\")\n .replace(/^v/, \"\")\n if (!doc.versions[n] && i.match(/\\.tgz$/)) {\n orphans.push(i)\n size += doc._attachments[i].length\n }\n }\n if (orphans.length) emit(doc._id, {size:size, orphans:orphans})\n }"
},
"starredByUser": {
"map": "function (doc) {\n if (!doc || !doc.users) return\n Object.keys(doc.users).forEach(function (m) {\n if (!doc.users[m]) return\n emit(m, doc._id)\n })\n}"
},
"starredByPackage": {
"map": "function (doc) {\n if (!doc || !doc.users) return\n Object.keys(doc.users).forEach(function (m) {\n if (!doc.users[m]) return\n emit(doc._id, m)\n })\n}"
},
"byUser": {
"map": "function (doc) {\n if (!doc || !doc.maintainers) return\n doc.maintainers.forEach(function (m) {\n emit(m.name, doc._id)\n })\n}"
},
"npmTop": {
"map": "function (doc) {\n if (!doc || !doc.maintainers || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n l = l && doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n doc.maintainers.forEach(function (m) {\n emit([m.name, doc._id, desc, readme], 1)\n })\n}",
"reduce": "_sum"
},
"browseAuthors": {
"map": "function (doc) {\n if (!doc || !doc.maintainers || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n l = l && doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n doc.maintainers.forEach(function (m) {\n emit([m.name, doc._id, desc, readme], 1)\n })\n}",
"reduce": "_sum"
},
"browseUpdated": {
"map": "function (doc) {\n if (!doc || !doc.versions || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n var t = doc.time && doc.time[l]\n if (!t) return\n var v = doc.versions[l]\n if (!v) return\n var d = new Date(t)\n if (!d.getTime()) return\n emit([ d.toISOString(),\n doc._id,\n v.description,\n v.readme ], 1)\n}",
"reduce": "_sum"
},
"browseAll": {
"map": "function (doc) {\n if (!doc || !doc.versions || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n emit([doc.name, desc, readme], 1)\n}",
"reduce": "_sum"
},
"analytics": {
"map": "function (doc) {\n if (!doc || !doc.time || doc.deprecated) return\n for (var i in doc.time) {\n var t = doc.time[i]\n var d = new Date(t)\n if (!d.getTime()) return\n var type = i === 'modified' ? 'latest'\n : i === 'created' ? 'created'\n : 'update'\n emit([ type,\n d.getUTCFullYear(),\n d.getUTCMonth() + 1,\n d.getUTCDate(),\n doc._id ], 1)\n }\n}",
"reduce": "_sum"
},
"dependedUpon": {
"map": "function (doc) {\n if (!doc || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n var d = l.dependencies\n if (!d) return\n for (var dep in d) {\n emit([dep, doc._id, desc, readme], 1)\n }\n}",
"reduce": "_sum"
},
"browseStarUser": {
"map": "function (doc) {\n if (!doc) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n var d = doc.users\n if (!d) return\n for (var user in d) {\n emit([user, doc._id, desc, readme], 1)\n }\n}",
"reduce": "_sum"
},
"browseStarPackage": {
"map": "function (doc) {\n if (!doc || doc.deprecated) return\n var l = doc['dist-tags'] && doc['dist-tags'].latest\n if (!l) return\n l = doc.versions && doc.versions[l]\n if (!l) return\n var desc = doc.description || l.description || ''\n var readme = doc.readme || l.readme || ''\n var d = doc.users\n if (!d) return\n for (var user in d) {\n emit([doc._id, desc, user, readme], 1)\n }\n}",
"reduce": "_sum"
},
"fieldsInUse": {
"map": "function (doc) {\n if (!doc.versions || !doc[\"dist-tags\"] || !doc[\"dist-tags\"].latest || doc.deprecated) {\n return\n }\n var d = doc.versions[doc[\"dist-tags\"].latest]\n if (!d) return\n for (var f in d) {\n emit(f, 1)\n if (d[f] && typeof d[f] === \"object\" &&\n (f === \"scripts\" || f === \"directories\")) {\n for (var i in d[f]) emit(f+\".\"+i, 1)\n }\n }\n}",
"reduce": "_sum"
},
"howBigIsYourPackage": {
"map": "function (doc) {\n var s = 0\n , c = 0\n if (!doc) return\n for (var i in doc._attachments) {\n s += doc._attachments[i].length\n c ++\n }\n if (s === 0) return\n emit(doc._id, {_id: doc._id, size: s, count: c, avg: s/c})\n }"
}
},
"shows": {
"package": "function (doc, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var semver = require(\"semver\")\n , code = 200\n , headers = {\"Content-Type\":\"application/json\"}\n , body = null\n\n delete doc.ctime\n delete doc.mtime\n if (doc.versions) Object.keys(doc.versions).forEach(function (v) {\n delete doc.versions[v].ctime\n delete doc.versions[v].mtime\n })\n\n // legacy kludge\n if (doc.versions) for (var v in doc.versions) {\n var clean = semver.clean(v)\n doc.versions[v].directories = doc.versions[v].directories || {}\n if (clean !== v) {\n var p = doc.versions[v]\n delete doc.versions[v]\n p.version = v = clean\n doc.versions[clean] = p\n }\n if (doc.versions[v].dist.tarball) {\n\n // make it point at THIS registry that is being requested,\n // with the full /db/_design/app/_rewrite if that is being used,\n // or just the /name if not.\n\n var t = doc.versions[v].dist.tarball\n t = t.replace(/^https?:\\/\\/[^\\/:]+(:[0-9]+)?/, '')\n\n var requestedPath = req.requested_path\n // workaround for old couch versions that didn't\n // have requested_path\n if (requestedPath && -1 === requestedPath.indexOf('show'))\n requestedPath = requestedPath.slice(0)\n else {\n var path = req.path\n if (path) {\n var i = path.indexOf('_show')\n if (i !== -1) {\n requestedPath = path.slice(0)\n requestedPath.splice(i, i + 2, '_rewrite')\n }\n } else return {\n code : 500,\n body : JSON.stringify({error: 'bad couch'}),\n headers : headers\n }\n }\n\n // doc.versions[v].dist._origTarball = doc.versions[v].dist.tarball\n // doc.versions[v].dist._headers = req.headers\n // doc.versions[v].dist._query = req.query\n // doc.versions[v].dist._reqPath = req.requested_path\n // doc.versions[v].dist._path = req.path\n // doc.versions[v].dist._t = t.slice(0)\n\n // actual location of tarball should always be:\n // .../_rewrite/pkg/-/pkg-version.tgz\n // or: /pkg/-/pkg-version.tgz\n // depending on what requested path is.\n var tf = [doc.name, '-', t.split('/').pop()]\n var i = requestedPath.indexOf('_rewrite')\n if (i !== -1) {\n tf = requestedPath.slice(0, i + 1).concat(tf)\n }\n t = '/' + tf.join('/')\n var h = \"http://\" + req.headers.Host\n\n doc.versions[v].dist.tarball = h + t\n }\n }\n if (doc[\"dist-tags\"]) for (var tag in doc[\"dist-tags\"]) {\n var clean = semver.clean(doc[\"dist-tags\"][tag])\n if (!clean) delete doc[\"dist-tags\"][tag]\n else doc[\"dist-tags\"][tag] = clean\n }\n // end kludge\n\n if (req.query.version) {\n var ver = req.query.version\n // if not a valid version, then treat as a tag.\n if ((!(ver in doc.versions) && (ver in doc[\"dist-tags\"]))\n || !semver.valid(ver)) {\n ver = doc[\"dist-tags\"][ver]\n }\n body = doc.versions[ver]\n if (!body) {\n code = 404\n body = {\"error\" : \"version not found: \"+req.query.version}\n }\n } else {\n body = doc\n for (var i in body) if (i.charAt(0) === \"_\" && i !== \"_id\" && i !== \"_rev\") {\n delete body[i]\n }\n for (var i in body.time) {\n if (!body.versions[i]) delete body.time[i]\n else body.time[i] = new Date(Date.parse(body.time[i])).toISOString()\n }\n }\n\n body = req.query.jsonp\n ? req.query.jsonp + \"(\" + JSON.stringify(body) + \")\"\n : toJSON(body)\n\n return {\n code : code,\n body : body,\n headers : headers\n }\n}"
},
"updates": {
"package": "function (doc, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var semver = require(\"semver\")\n var valid = require(\"valid\")\n function error (reason) {\n return [{_id: \"error: forbidden\", forbidden:reason}, JSON.stringify({forbidden:reason})]\n }\n\n function ok (doc, message) {\n delete doc.mtime\n delete doc.ctime\n var time = doc.time = doc.time || {}\n time.modified = (new Date()).toISOString()\n time.created = time.created || time.modified\n for (var v in doc.versions) {\n var ver = doc.versions[v]\n delete ver.ctime\n delete ver.mtime\n time[v] = time[v] || (new Date()).toISOString()\n }\n return [doc, JSON.stringify({ok:message})]\n }\n\n if (doc) {\n if (req.query.version) {\n var parsed = semver.valid(req.query.version)\n if (!parsed) {\n // it's a tag.\n var tag = req.query.version\n , ver = JSON.parse(req.body)\n if (!semver.valid(ver)) {\n return error(\"setting tag \"+tag+\" to invalid version: \"+req.body)\n }\n doc[\"dist-tags\"][tag] = semver.clean(ver)\n return ok(doc, \"updated tag\")\n }\n // adding a new version.\n var ver = req.query.version\n if (!semver.valid(ver)) {\n return error(\"invalid version: \"+ver)\n }\n\n if ((ver in doc.versions) || (semver.clean(ver) in doc.versions)) {\n // attempting to overwrite an existing version.\n // not supported at this time.\n if (!req.query.rev || req.query.rev !== doc._rev) {\n return error(\"cannot modify existing version\")\n }\n }\n\n var body = JSON.parse(req.body)\n if (!valid.name(body.name)) {\n return error( \"Invalid name: \"+JSON.stringify(body.name))\n }\n body.version = semver.clean(body.version)\n ver = semver.clean(ver)\n if (body.version !== ver) {\n return error( \"version in doc doesn't match version in request: \"\n + JSON.stringify(body.version)\n + \" !== \" + JSON.stringify(ver) )\n }\n body._id = body.name + \"@\" + body.version\n if (body.description) doc.description = body.description\n if (body.author) doc.author = body.author\n if (body.repository) doc.repository = body.repository\n body.maintainers = doc.maintainers\n\n if (body.publishConfig && typeof body.publishConfig === 'object') {\n Object.keys(body.publishConfig).filter(function (k) {\n return k.match(/^_/)\n }).forEach(function (k) {\n delete body.publishConfig[k]\n })\n }\n\n var tag = req.query.tag\n || (body.publishConfig && body.publishConfig.tag)\n || body.tag\n || \"latest\"\n\n if (!req.query.pre)\n doc[\"dist-tags\"][tag] = body.version\n if (!doc[\"dist-tags\"].latest)\n doc[\"dist-tags\"].latest = body.version\n doc.versions[ver] = body\n doc.time = doc.time || {}\n doc.time[ver] = (new Date()).toISOString()\n return ok(doc, \"added version\")\n }\n\n // update the package info\n var newdoc = JSON.parse(req.body)\n , changed = false\n if (doc._rev && doc._rev !== newdoc._rev) {\n return error( \"must supply latest _rev to update existing package\" )\n }\n for (var i in newdoc) if (typeof newdoc[i] === \"string\" || i === \"maintainers\") {\n doc[i] = newdoc[i]\n }\n if (newdoc.versions) {\n doc.versions = newdoc.versions\n }\n if (newdoc[\"dist-tags\"]) {\n doc[\"dist-tags\"] = newdoc[\"dist-tags\"]\n }\n if (newdoc.users) {\n if (!doc.users) doc.users = {}\n doc.users[req.userCtx.name] = newdoc.users[req.userCtx.name]\n }\n return ok(doc, \"updated package metadata\")\n } else {\n // Create new package doc\n doc = JSON.parse(req.body)\n if (!doc._id) doc._id = doc.name\n if (!doc.versions) doc.versions = {}\n var latest\n for (var v in doc.versions) {\n if (!semver.valid(v)) return 
error(\"Invalid version: \"+JSON.stringify(v))\n var p = doc.versions[v]\n if (p.version !== v) return error(\"Version mismatch: \"+JSON.stringify(v)\n +\" !== \"+JSON.stringify(p.version))\n if (!valid.name(p.name)) return error(\"Invalid name: \"+JSON.stringify(p.name))\n latest = semver.clean(v)\n }\n if (!doc['dist-tags']) doc['dist-tags'] = {}\n if (latest) doc[\"dist-tags\"].latest = latest\n return ok(doc, \"created new entry\")\n }\n}"
},
"rewrites": [
{
"from": "/",
"to": "../../../registry",
"method": "GET"
},
{
"from": "/-/jsonp/:jsonp",
"to": "_list/short/listAll",
"method": "GET"
},
{
"from": "/_session",
"to": "../../../_session",
"method": "GET"
},
{
"from": "/_session",
"to": "../../../_session",
"method": "PUT"
},
{
"from": "/_session",
"to": "../../../_session",
"method": "POST"
},
{
"from": "/_session",
"to": "../../../_session",
"method": "DELETE"
},
{
"from": "/_session",
"to": "../../../_session",
"method": "HEAD"
},
{
"from": "/-/all/since",
"to": "_list/index/modified",
"method": "GET"
},
{
"from": "/-/rss",
"to": "_list/rss/modified",
"method": "GET"
},
{
"from": "/-/all",
"to": "_list/index/listAll",
"method": "GET"
},
{
"from": "/-/all/-/jsonp/:jsonp",
"to": "_list/index/listAll",
"method": "GET"
},
{
"from": "/-/short",
"to": "_list/short/listAll",
"method": "GET"
},
{
"from": "/-/scripts",
"to": "_list/scripts/scripts",
"method": "GET"
},
{
"from": "/-/by-field",
"to": "_list/byField/byField",
"method": "GET"
},
{
"from": "/-/fields",
"to": "_list/sortCount/fieldsInUse",
"method": "GET",
"query": {
"group": "true"
}
},
{
"from": "/-/needbuild",
"to": "_list/needBuild/needBuild",
"method": "GET"
},
{
"from": "/-/prebuilt",
"to": "_list/preBuilt/needBuild",
"method": "GET"
},
{
"from": "/-/nonlocal",
"to": "_list/short/nonlocal",
"method": "GET"
},
{
"from": "/favicon.ico",
"to": "../../npm/favicon.ico",
"method": "GET"
},
{
"from": "/-/users",
"to": "../../../_users/_design/_auth/_list/index/listAll",
"method": "GET"
},
{
"from": "/-/user/:user",
"to": "../../../_users/:user",
"method": "PUT"
},
{
"from": "/-/user/:user/-rev/:rev",
"to": "../../../_users/:user",
"method": "PUT"
},
{
"from": "/-/user/:user",
"to": "../../../_users/:user",
"method": "GET"
},
{
"from": "/_users/:user",
"to": "../../../_users/:user",
"method": "PUT"
},
{
"from": "/_users/:user",
"to": "../../../_users/:user",
"method": "GET"
},
{
"from": "/public_users/:user",
"to": "../../../public_users/:user",
"method": "PUT"
},
{
"from": "/public_users/:user",
"to": "../../../public_users/:user",
"method": "GET"
},
{
"from": "/-/user-by-email/:email",
"to": "../../../_users/_design/_auth/_list/email/listAll",
"method": "GET"
},
{
"from": "/-/top",
"to": "_view/npmTop",
"query": {
"group_level": 1
},
"method": "GET"
},
{
"from": "/-/by-user/:user",
"to": "_list/byUser/byUser",
"method": "GET"
},
{
"from": "/-/starred-by-user/:user",
"to": "_list/byUser/starredByUser",
"method": "GET"
},
{
"from": "/-/starred-by-package/:user",
"to": "_list/byUser/starredByPackage",
"method": "GET"
},
{
"from": "/:pkg",
"to": "/_show/package/:pkg",
"method": "GET"
},
{
"from": "/:pkg/-/jsonp/:jsonp",
"to": "/_show/package/:pkg",
"method": "GET"
},
{
"from": "/:pkg/:version",
"to": "_show/package/:pkg",
"method": "GET"
},
{
"from": "/:pkg/:version/-/jsonp/:jsonp",
"to": "_show/package/:pkg",
"method": "GET"
},
{
"from": "/:pkg/-/:att",
"to": "../../:pkg/:att",
"method": "GET"
},
{
"from": "/:pkg/-/:att/:rev",
"to": "../../:pkg/:att",
"method": "PUT"
},
{
"from": "/:pkg/-/:att/-rev/:rev",
"to": "../../:pkg/:att",
"method": "PUT"
},
{
"from": "/:pkg/-/:att/:rev",
"to": "../../:pkg/:att",
"method": "DELETE"
},
{
"from": "/:pkg/-/:att/-rev/:rev",
"to": "../../:pkg/:att",
"method": "DELETE"
},
{
"from": "/:pkg",
"to": "/_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/-rev/:rev",
"to": "/_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version/-rev/:rev",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version/-tag/:tag",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version/-tag/:tag/-rev/:rev",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version/-pre/:pre",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/:version/-pre/:pre/-rev/:rev",
"to": "_update/package/:pkg",
"method": "PUT"
},
{
"from": "/:pkg/-rev/:rev",
"to": "../../:pkg",
"method": "DELETE"
},
{
"from": "/-/_view/*",
"to": "_view/*",
"method": "GET"
},
{
"from": "/-/_list/*",
"to": "_list/*",
"method": "GET"
},
{
"from": "/-/_show/*",
"to": "_show/*",
"method": "GET"
}
],
"lists": {
"first": "function (head, req) {\n var row = getRow()\n send(toJSON([row, row.id, row.doc]))\n}",
"short": "function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var out = {}\n , row\n , show = (req.query.show || \"\").split(\",\")\n , v = show.indexOf(\"version\") !== -1\n , t = show.indexOf(\"tag\") !== -1\n while (row = getRow()) {\n if (!row.id) continue\n if (!t && !v) {\n out[row.id] = true\n continue\n }\n var val = row.value\n if (t) Object.keys(val[\"dist-tags\"] || {}).forEach(function (t) {\n out[row.id + \"@\" + t] = true\n })\n if (v) Object.keys(val.versions || {}).forEach(function (v) {\n out[row.id + \"@\" + v] = true\n })\n }\n send(toJSON(Object.keys(out)))\n}",
"rss": "function (head, req) {\n var limit = +req.query.limit\n , desc = req.query.descending\n if (!desc || !limit || limit > 50 || limit < 0) {\n start({ code: 403\n , headers: { 'Content-type': 'text/xml' }})\n send('<error><![CDATA[Please retry your request with '\n + '?descending=true&limit=50 query params]]></error>')\n return\n }\n\n start({ code: 200\n // application/rss+xml is correcter, but also annoyinger\n , headers: { \"Content-Type\": \"text/xml\" } })\n send('<?xml version=\"1.0\" encoding=\"UTF-8\"?>'\n +'\\n<!DOCTYPE rss PUBLIC \"-//Netscape Communications//DTD RSS 0.91//EN\" '\n +'\"http://my.netscape.com/publish/formats/rss-0.91.dtd\">'\n +'\\n<rss version=\"0.91\">'\n +'\\n <channel>'\n +'\\n <title>npm recent updates</title>'\n +'\\n <link>http://search.npmjs.org/</link>'\n +'\\n <description>Updates to the npm package registry</description>'\n +'\\n <language>en</language>')\n\n var row\n while (row = getRow()) {\n if (!row.value || !row.value[\"dist-tags\"]) continue\n\n var doc = row.value\n var authors = doc.maintainers.map(function (m) {\n return '<author>' + m.name + '</author>'\n }).join('\\n ')\n\n var latest = doc[\"dist-tags\"].latest\n var time = doc.time && doc.time[latest]\n var date = new Date(time)\n doc = doc.versions[latest]\n if (!doc || !time || !date) continue\n\n var url = doc.homepage\n , repo = doc.repository || doc.repositories\n if (!url && repo) {\n if (Array.isArray(repo)) repo = repo.shift()\n if (repo.url) repo = repo.url\n if (repo && (typeof repo === \"string\")) {\n url = repo.replace(/^git(@|:\\/\\/)/, 'http://')\n .replace(/\\.git$/, '')+\"#readme\"\n }\n }\n if (!url) url = \"http://search.npmjs.org/#/\" + doc.name\n\n send('\\n <item>'\n +'\\n <title>' + doc._id + '</title>'\n +'\\n <link>' + url + '</link>'\n +'\\n ' + authors\n +'\\n <description><![CDATA['\n + (doc.description || '').trim() + ']]></description>'\n +'\\n <pubDate>' + date.toISOString() + '</pubDate>'\n +'\\n </item>')\n }\n send('\\n </channel>'\n +'\\n</rss>')\n}",
"index": "function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n var basePath = req.requested_path\n if (basePath.indexOf(\"_list\") === -1) basePath = \"\"\n else {\n basePath = basePath.slice(0, basePath.indexOf(\"_list\"))\n .concat([\"_rewrite\", \"\"]).join(\"/\")\n }\n\n var row\n , semver = require(\"semver\")\n , res = []\n\n if (req.query.jsonp) send(req.query.jsonp + \"(\")\n send('{\"_updated\":' + Date.now())\n while (row = getRow()) {\n if (!row.id) continue\n\n var doc = row.value\n if (!doc.name || !doc._id ||\n encodeURIComponent(doc._id) !== doc._id) continue\n\n var p = {}\n\n // legacy kludge\n delete doc.mtime\n delete doc.ctime\n if (doc.versions) for (var v in doc.versions) {\n var clean = semver.clean(v)\n delete doc.versions[v].ctime\n delete doc.versions[v].mtime\n if (clean !== v) {\n var x = doc.versions[v]\n delete doc.versions[v]\n x.version = v = clean\n doc.versions[clean] = x\n }\n }\n if (doc[\"dist-tags\"]) for (var tag in doc[\"dist-tags\"]) {\n var clean = semver.clean(doc[\"dist-tags\"][tag])\n if (!clean) delete doc[\"dist-tags\"][tag]\n else doc[\"dist-tags\"][tag] = clean\n }\n // end kludge\n\n for (var i in doc) {\n if (i === \"versions\" || i.charAt(0) === \"_\" || i === 'readme' ||\n i === 'time') continue\n p[i] = doc[i]\n }\n if (doc.time) {\n p.time = { modified: doc.time.modified }\n }\n if (p['dist-tags'] && typeof p['dist-tags'] === 'object') {\n p.versions = Object.keys(p['dist-tags']).reduce(function (ac, v) {\n ac[ p['dist-tags'][v] ] = v\n return ac\n }, {})\n }\n if (doc.repositories && Array.isArray(doc.repositories)) {\n doc.repository = doc.repositories[0]\n delete doc.repositories\n }\n if (doc.repository) p.repository = doc.repository\n if (doc.description) p.description = doc.description\n for (var i in doc.versions) {\n if (doc.versions[i].repository && !doc.repository) {\n p.repository = doc.versions[i].repository\n }\n if (doc.versions[i].keywords) p.keywords = doc.versions[i].keywords\n }\n send(',' + JSON.stringify(doc._id) + ':' + JSON.stringify(p))\n }\n send('}')\n if (req.query.jsonp) send(')')\n\n}",
"byField": "function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n if (!req.query.field) {\n start({\"code\":\"400\", \"headers\": {\"Content-Type\": \"application/json\"}})\n send('{\"error\":\"Please specify a field parameter\"}')\n return\n }\n\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"application/json\"}})\n var row\n , out = {}\n , field = req.query.field\n , not = field.charAt(0) === \"!\"\n if (not) field = field.substr(1)\n while (row = getRow()) {\n if (!row.id) continue\n var has = row.value.hasOwnProperty(field)\n if (!not && !has || not && has) continue\n out[row.key] = { \"maintainers\": row.value.maintainers.map(function (m) {\n return m.name + \" <\" + m.email + \">\"\n }) }\n if (has) out[row.key][field] = row.value[field]\n }\n send(JSON.stringify(out))\n}",
"preBuilt": "function (head, req) {\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"text/plain\"}});\n var row\n , out = []\n while (row = getRow()) {\n if (!row.id) continue\n if (!(req.query.bindist && row.value[req.query.bindist])) continue\n out.push(row.key)\n }\n send(out.join(\"\\n\"))\n}",
"needBuild": "function (head, req) {\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"text/plain\"}});\n var row\n , first = true\n while (row = getRow()) {\n if (!row.id) continue\n if (req.query.bindist && row.value[req.query.bindist]) continue\n // out.push(row.key)\n send((first ? \"{\" : \",\")\n + JSON.stringify(row.key)\n + \":\"\n + JSON.stringify(Object.keys(row.value))\n + \"\\n\")\n first = false\n }\n send(\"}\\n\")\n}",
"scripts": "function (head, req) {\n var row\n , out = {}\n , scripts = req.query.scripts && req.query.scripts.split(\",\")\n , match = req.query.match\n\n if (match) match = new RegExp(match)\n\n while (row = getRow()) {\n inc = true\n if (!row.id) continue\n if (req.query.package && row.id !== req.query.package) continue\n if (scripts && scripts.length) {\n var inc = false\n for (var s = 0, l = scripts.length; s < l && !inc; s ++) {\n inc = row.value[scripts[s]]\n if (match) inc = inc && row.value[scripts[s]].match(match)\n }\n if (!inc) continue\n }\n out[row.id] = row.value\n }\n send(toJSON(out))\n}",
"rowdump": "function (head, req) {\n var rows = []\n while (row = getRow()) rows.push(row)\n send(toJSON(rows))\n}",
"passthrough": "function (head, req) {\n var out = {}\n , row\n while (row = getRow()) {\n if (!row.id) continue\n out[row.id] = row.value\n }\n send(toJSON(out))\n}",
"byUser": "function (head, req) {\n var out = {}\n , user = req.query.user && req.query.user !== \"-\" ? req.query.user : null\n , users = user && user.split(\"|\")\n while (row = getRow()) {\n if (!user || users.indexOf(row.key) !== -1) {\n var l = out[row.key] = out[row.key] || []\n l.push(row.value)\n }\n }\n send(toJSON(out))\n}",
"sortCount": "function (head, req) {\n var out = []\n while (row = getRow()) {\n out.push([row.key, row.value])\n }\n out = out.sort(function (a, b) {\n return a[1] === b[1] ? 0\n : a[1] < b[1] ? 1 : -1\n })\n var outObj = {}\n for (var i = 0, l = out.length; i < l; i ++) {\n outObj[out[i][0]] = out[i][1]\n }\n send(toJSON(outObj))\n}",
"size": "function (head, req) {\n var row\n , out = []\n , max = 0\n while (row = getRow()) {\n if (!row.id) continue\n out.push(row.value)\n }\n var list = []\n out = out.sort(function (a, b) {\n max = Math.max(max, a.size, b.size)\n return a.size > b.size ? -1 : 1\n })\n .reduce(function (l, r) {\n var stars = new Array(Math.ceil(80 * (r.size/max)) + 1).join(\"\\u25FE\")\n l[r._id] = { size: r.size\n , count: r.count\n , avg: r.avg\n , rel: r.size / max\n , s: stars\n }\n return l\n }, {})\n send(JSON.stringify(out))\n}",
"histogram": "function (head, req) {\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n start({\"code\": 200, \"headers\": {\"Content-Type\": \"text/plain\"}});\n var row\n , out = []\n , max = {}\n , field = req.query.field\n , sort = req.query.sort\n , doAll = !field\n\n while (row = getRow()) {\n if (!row.id) continue\n out.push(row.value)\n }\n\n if (!doAll) out.sort(function (a, b) {\n max[field] = Math.max(max[field] || -Infinity, a[field], b[field])\n return a[field] > b[field] ? -1 : 1\n })\n else out.sort(function (a, b) {\n for (var field in a) if (field.charAt(0) !== \"_\" && !isNaN(a[field])) {\n max[field] = Math.max(max[field] || -Infinity, a[field])\n }\n for (var field in b) if (field.charAt(0) !== \"_\" && !isNaN(b[field])) {\n max[field] = Math.max(max[field] || -Infinity, b[field])\n }\n if (sort) {\n return Number(a[sort]) > Number(b[sort]) ? -1 : 1\n } else {\n return 0\n }\n })\n if (doAll) {\n // sort the fields by the max sizes.\n var m = {}\n Object.keys(max).sort(function (a, b) {\n return max[a] > max[b] ? -1 : 1\n }).forEach(function (k) { m[k] = max[k] })\n max = m\n }\n out = out.map(function (a) {\n var o = {}\n for (var f in max) {\n var blk = new Array(Math.ceil(80*(a[f] / max[f])+1)).join(\"#\")\n , spc = new Array(80 - blk.length + 1).join(\" \")\n o[f] = spc + blk + \" \" + a[f]\n }\n o._id = a._id\n return o\n }).reduce(function (l, r) {\n l[r._id] = r\n return l\n }, {})\n\n var spc = new Array(82).join(\" \")\n send(Object.keys(out).map(function (i) {\n if (doAll) return [spc + i].concat(Object.keys(max).map(function (f) {\n return out[i][f] + \" \" + f\n })).join(\"\\n\") + \"\\n\"\n return out[i][field] + \" \" + i\n }).join(\"\\n\"))\n}"
},
"validate_doc_update": "function (doc, oldDoc, user, dbCtx) {\n // can't write to the db without logging in.\n if (!user || !user.name) {\n throw { unauthorized: \"Please log in before writing to the db\" }\n }\n\n require(\"monkeypatch\").patch(Object, Date, Array, String)\n\n var semver = require(\"semver\")\n var valid = require(\"valid\")\n var deep = require(\"deep\")\n var deepEquals = deep.deepEquals\n\n\n if (oldDoc) oldDoc.users = oldDoc.users || {}\n doc.users = doc.users || {}\n\n\n function assert (ok, message) {\n if (!ok) throw {forbidden:message}\n }\n\n // admins can do ANYTHING (even break stuff)\n if (isAdmin()) return\n\n // figure out what changed in the doc.\n function diffObj (o, n, p) {\n p = p || \"\"\n var d = []\n var seenKeys = []\n for (var i in o) {\n seenKeys.push(i)\n if (!(i in n)) {\n d.push(\"Deleted: \"+p+i)\n }\n if (typeof o[i] !== typeof n[i]) {\n d.push(\"Changed Type: \"+p+i)\n }\n if (typeof o[i] === \"object\" && o[i] && !n[i]) {\n d.push(\"Nulled: \"+p+i)\n }\n if (typeof o[i] === \"object\" && !o[i] && n[i]) {\n d.push(\"Un-nulled: \"+p+i)\n }\n if (typeof o[i] === \"object\") {\n d = d.concat(diffObj(o[i], n[i], p + i + \".\"))\n } else {\n if (o[i] !== n[i]) {\n d.push(\"Changed: \"+p+i+\" \"+JSON.stringify(o[i]) + \" -> \"\n +JSON.stringify(n[i]))\n }\n }\n }\n for (var i in n) {\n if (-1 === seenKeys.indexOf(i)) {\n d.push(\"Added: \"+p+i)\n }\n }\n return d\n }\n\n // if the doc is an {error:\"blerg\"}, then throw that right out.\n // something detected in the _updates/package script.\n // XXX: Make this not ever happen ever. Validation belongs here,\n // not in the update function.\n assert(!doc.forbidden || doc._deleted, doc.forbidden)\n\n // everyone may alter his \"starred\" status on any package\n if (oldDoc &&\n !doc._deleted &&\n deepEquals(doc, oldDoc,\n [[\"users\", user.name], [\"time\", \"modified\"]])) {\n return\n }\n\n\n // check if the user is allowed to write to this package.\n function validUser () {\n if ( !oldDoc || !oldDoc.maintainers ) return true\n if (isAdmin()) return true\n if (typeof oldDoc.maintainers !== \"object\") return true\n for (var i = 0, l = oldDoc.maintainers.length; i < l; i ++) {\n if (oldDoc.maintainers[i].name === user.name) return true\n }\n return false\n }\n\n function isAdmin () {\n if (dbCtx &&\n dbCtx.admins) {\n if (dbCtx.admins.names &&\n dbCtx.admins.roles &&\n dbCtx.admins.names.indexOf(user.name) !== -1) return true\n for (var i=0;i<user.roles.length;i++) {\n if (dbCtx.admins.roles.indexOf(user.roles[i]) !== -1) return true\n }\n }\n return user.roles.indexOf(\"_admin\") >= 0\n }\n\n var vu = validUser()\n if (!vu) {\n assert(vu, \"user: \" + user.name + \" not authorized to modify \"\n + doc.name + \"\\n\"\n + diffObj(oldDoc, doc).join(\"\\n\"))\n }\n\n // deleting a document entirely *is* allowed.\n if (doc._deleted) return\n\n // sanity checks.\n assert(valid.name(doc.name), \"name invalid: \"+doc.name)\n\n // New documents may only be created with all lowercase names.\n // At some point, existing docs will be migrated to lowercase names\n // as well.\n if (!oldDoc && doc.name !== doc.name.toLowerCase()) {\n assert(false, \"New packages must have all-lowercase names\")\n }\n\n assert(doc.name === doc._id, \"name must match _id\")\n assert(!doc.mtime, \"doc.mtime is deprecated\")\n assert(!doc.ctime, \"doc.ctime is deprecated\")\n assert(typeof doc.time === \"object\", \"time must be object\")\n\n assert(typeof doc[\"dist-tags\"] === \"object\", \"dist-tags must be object\")\n\n var 
versions = doc.versions\n assert(typeof versions === \"object\", \"versions must be object\")\n\n var latest = doc[\"dist-tags\"].latest\n if (latest) {\n assert(versions[latest], \"dist-tags.latest must be valid version\")\n }\n\n for (var v in doc[\"dist-tags\"]) {\n var ver = doc[\"dist-tags\"][v]\n assert(semver.valid(ver),\n v + \" version invalid version: \" + ver)\n assert(versions[ver],\n v + \" version missing: \" + ver)\n }\n\n for (var ver in versions) {\n var version = versions[ver]\n assert(semver.valid(ver),\n \"invalid version: \" + ver)\n assert(typeof version === \"object\",\n \"version entries must be objects\")\n assert(version.version === ver,\n \"version must match: \"+ver)\n assert(version.name === doc._id,\n \"version \"+ver+\" has incorrect name: \"+version.name)\n }\n\n assert(Array.isArray(doc.maintainers),\n \"maintainers should be a list of owners\")\n doc.maintainers.forEach(function (m) {\n assert(m.name && m.email,\n \"Maintainer should have name and email: \" + JSON.stringify(m))\n })\n\n var time = doc.time\n var c = new Date(Date.parse(time.created))\n , m = new Date(Date.parse(time.modified))\n assert(c.toString() !== \"Invalid Date\",\n \"invalid created time: \" + JSON.stringify(time.created))\n\n assert(m.toString() !== \"Invalid Date\",\n \"invalid modified time: \" + JSON.stringify(time.modified))\n\n if (oldDoc &&\n oldDoc.time &&\n oldDoc.time.created &&\n Date.parse(oldDoc.time.created)) {\n assert(Date.parse(oldDoc.time.created) === Date.parse(time.created),\n \"created time cannot be changed\")\n }\n\n if (oldDoc && oldDoc.users) {\n assert(deepEquals(doc.users,\n oldDoc.users, [[user.name]]),\n \"you may only alter your own 'star' setting\")\n }\n\n if (doc.url) {\n assert(false,\n \"Package redirection has been removed. \"+\n \"Please update your publish scripts.\")\n }\n\n if (doc.description) {\n assert(typeof doc.description === 'string',\n '\"description\" field must be a string')\n }\n\n // at this point, we've passed the basic sanity tests.\n // Time to dig into more details.\n // Valid operations:\n // 1. Add a version\n // 2. Remove a version\n // 3. Modify a version\n // 4. Add or remove onesself from the \"users\" hash (already done)\n //\n // If a version is being added or changed, make sure that the\n // _npmUser field matches the current user, and that the\n // time object has the proper entry, and that the \"maintainers\"\n // matches the current \"maintainers\" field.\n //\n // Things that must not change:\n //\n // 1. More than one version being modified.\n // 2. Removing keys from the \"time\" hash\n //\n // Later, once we are off of the update function 3-stage approach,\n // these things should also be errors:\n //\n // 1. Lacking an attachment for any published version.\n // 2. Having an attachment for any version not published.\n\n var oldVersions = oldDoc ? oldDoc.versions || {} : {}\n var oldTime = oldDoc ? 
oldDoc.time || {} : {}\n\n var versions = Object.keys(doc.versions)\n , modified = null\n\n for (var i = 0, l = versions.length; i < l; i ++) {\n var v = versions[i]\n if (!v) continue\n assert(doc.time[v], \"must have time entry for \"+v)\n\n if (!deepEquals(doc.versions[v], oldVersions[v], [[\"directories\"], [\"deprecated\"]]) &&\n doc.versions[v]) {\n // this one was modified\n // if it's more than a few minutes off, then something is wrong.\n var t = Date.parse(doc.time[v])\n , n = Date.now()\n // assert(doc.time[v] !== oldTime[v] &&\n // Math.abs(n - t) < 1000 * 60 * 60,\n // v + \" time needs to be updated\\n\" +\n // \"new=\" + JSON.stringify(doc.versions[v]) + \"\\n\" +\n // \"old=\" + JSON.stringify(oldVersions[v]))\n\n // var mt = Date.parse(doc.time.modified).getTime()\n // , vt = t.getTime()\n // assert(Math.abs(mt - vt) < 1000 * 60 * 60,\n // v + \" is modified, should match modified time\")\n\n // XXX Remove the guard these once old docs have been found and\n // fixed. It's too big of a pain to have to manually fix\n // each one every time someone complains.\n if (typeof doc.versions[v]._npmUser !== \"object\") continue\n\n\n assert(typeof doc.versions[v]._npmUser === \"object\",\n \"_npmUser field must be object\\n\"+\n \"(You probably need to upgrade your npm version)\")\n assert(doc.versions[v]._npmUser.name === user.name,\n \"_npmUser.name must === user.name\")\n assert(deepEquals(doc.versions[v].maintainers,\n doc.maintainers),\n \"modified version 'maintainers' must === doc.maintainers\")\n\n // make sure that the _npmUser is one of the maintainers\n var found = false\n for (var j = 0, lm = doc.maintainers.length; j < lm; j ++) {\n var m = doc.maintainers[j]\n if (m.name === doc.versions[v]._npmUser.name) {\n found = true\n break\n }\n }\n assert(found, \"_npmUser must be a current maintainer.\\n\"+\n \"maintainers=\" + JSON.stringify(doc.maintainers)+\"\\n\"+\n \"current user=\" + JSON.stringify(doc.versions[v]._npmUser))\n\n } else if (oldTime[v]) {\n assert(oldTime[v] === doc.time[v],\n v + \" time should not be modified 1\")\n }\n }\n\n // now go through all the time settings that weren't covered\n for (var v in oldTime) {\n if (doc.versions[v] || !oldVersions[v]) continue\n assert(doc.time[v] === oldTime[v],\n v + \" time should not be modified 2\")\n }\n\n}",
"language": "javascript",
"deep": "exports.deepEquals = deepEquals\nexports.extend = deepExtend\nfunction deepExtend(o1, o2) {\n // extend o1 with o2 (in-place)\n for (var prop in o2) {\n if (hOP(o2, prop)) {\n if (hOP(o1, prop)) {\n if (typeof o1[prop] === \"object\") {\n deepExtend(o1[prop], o2[prop])\n }\n } else {\n o1[prop] = o2[prop]\n }\n }\n }\n return o1\n }\nfunction fullPath(pathPrefix, p){\n return pathPrefix.concat([p])\n }\nfunction isObject(v){\n return typeof v === 'object'\n }\nfunction arrayInArray(v, arr) {\n // Check whether `arr` contains an array that's shallowly equal to `v`.\n return arr.some(function(e) {\n if (e.length !== v.length) return false\n for (var i=0; i<e.length; i++) {\n if (e[i] !== v[i]) {\n return false\n }\n }\n return true\n })\n }\nfunction deepEquals(o1, o2, ignoreKeys, pathPrefix){\n pathPrefix = pathPrefix || []\n ignoreKeys = ignoreKeys || []\n function hOP (obj, prop) {\n return Object.prototype.hasOwnProperty.call(obj, prop)\n }\n if (typeof o1 !== typeof o2) {\n return false\n } else if (!isObject(o1)) {\n return o1 === o2\n }\n for (var prop in o1) {\n if (hOP(o1, prop) &&\n !arrayInArray(fullPath(pathPrefix, prop), ignoreKeys)) {\n if (!hOP(o2, prop) ||\n !deepEquals(o1[prop],\n o2[prop],\n ignoreKeys,\n fullPath(pathPrefix, prop))) {\n return false\n }\n }\n }\n for (var prop in o2) {\n if (hOP(o2, prop) &&\n !hOP(o1, prop) &&\n !arrayInArray(fullPath(pathPrefix, prop), ignoreKeys)) {\n return false\n }\n }\n return true\n }",
"semver": "var expr = exports.expression = /^\\s*\\s*[v=]*\\s*([0-9]+)\\.([0-9]+)\\.([0-9]+)(-[0-9]+-?)?([a-zA-Z-][a-zA-Z0-9-.:]*)?\\s*$/\nexports.valid = valid\nexports.clean = clean\nfunction valid(v) {\n return v && typeof v === \"string\" && v.match(expr)\n }\nfunction clean(v) {\n v = valid(v)\n if (!v) return v\n return [ v[1]||'0',\n v[2]||'0',\n v[3]||'0' ].join('.') +\n (v[4]||'') +\n (v[5]||'')\n }",
"valid": "var semver = require(\"semver\")\nexports.name = validName\nexports.package = validPackage\nfunction validName(name) {\n if (!name) return false\n var n = name.trim()\n if (!n || n.charAt(0) === \".\"\n || n.match(/[\\/\\(\\)&\\?#\\|<>@:%\\s\\\\]/)\n || n.toLowerCase() === \"node_modules\"\n || n.toLowerCase() === \"favicon.ico\") {\n return false\n }\n return n\n }\nfunction validPackage(pkg) {\n return validName(pkg.name) && semver.valid(pkg.version)\n }",
"Date": "exports.parse = parse\nexports.toISOString = toISOString\nexports.now = now\nfunction now() {\n return new Date().getTime()\n }\nfunction parse(s) {\n // s is something like \"2010-12-29T07:31:06Z\"\n s = s.split(\"T\")\n var ds = s[0]\n , ts = s[1]\n , d = new Date()\n ds = ds.split(\"-\")\n ts = ts.split(\":\")\n var tz = ts[2].substr(2)\n ts[2] = ts[2].substr(0, 2)\n d.setUTCFullYear(+ds[0])\n d.setUTCMonth(+ds[1]-1)\n d.setUTCDate(+ds[2])\n d.setUTCHours(+ts[0])\n d.setUTCMinutes(+ts[1])\n d.setUTCSeconds(+ts[2])\n d.setUTCMilliseconds(0)\n return d.getTime()\n }\nexports.toISOString = toISOString\nfunction toISOString() { return ISODateString(this) }\nfunction pad(n){return n<10 ? '0'+n : n}\nfunction ISODateString(d){\n return d.getUTCFullYear()+'-'\n + pad(d.getUTCMonth()+1)+'-'\n + pad(d.getUTCDate())+'T'\n + pad(d.getUTCHours())+':'\n + pad(d.getUTCMinutes())+':'\n + pad(d.getUTCSeconds())+'Z'}",
"Object": "exports.keys = keys\nfunction keys(o) {\n var a = []\n for (var i in o) a.push(i)\n return a }",
"Array": "exports.isArray = isArray\nexports.forEach = forEach\n\nexports.reduce = reduce\n\nfunction forEach(fn) {\n for (var i = 0, l = this.length; i < l; i ++) {\n if (this.hasOwnProperty(i)) {\n fn(this[i], i, this)\n }\n }\n }\nfunction reduce(callback, initialValue) {\n var previousValue = initialValue || this[0];\n for (var i = initialValue ? 0 : 1; i < this.length; i++) {\n previousValue = callback(previousValue, this[i], i, this);\n }\n return previousValue;\n }\nfunction isArray(a) {\n return a instanceof Array\n || Object.prototype.toString.call(a) === \"[object Array]\"\n || (typeof a === \"object\" && typeof a.length === \"number\") }",
"String": "exports.trim = trim\nfunction trim() {\n return this.replace(/^\\s+|\\s+$/g, \"\")\n }",
"monkeypatch": "exports.patch = patch\nfunction patch(Object, Date, Array, String) {\n if (!Date.parse || isNaN(Date.parse(\"2010-12-29T07:31:06Z\"))) {\n Date.parse = require(\"Date\").parse\n }\n\n Date.prototype.toISOString = Date.prototype.toISOString\n || require(\"Date\").toISOString\n\n Date.now = Date.now\n || require(\"Date\").now\n\n Object.keys = Object.keys\n || require(\"Object\").keys\n\n Array.prototype.forEach = Array.prototype.forEach\n || require(\"Array\").forEach\n\n Array.prototype.reduce = Array.prototype.reduce\n || require(\"Array\").reduce\n\n Array.isArray = Array.isArray\n || require(\"Array\").isArray\n\n String.prototype.trim = String.prototype.trim\n || require(\"String\").trim\n }",
"attachments_md5": {}
}
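Most of the reduce views above use `_sum` over `emit(..., 1)`, so grouped queries return counts. For example, the `dependedUpon` view emits a `[dependency, package, description, readme]` key for every dependency of each package's latest version, and the `/-/_view/*` rewrite exposes it for GET requests. A minimal sketch of counting a package's dependents, again assuming a local replica of the registry at `localhost:5984/registry` (placeholders, not part of this gist):

// count-dependents.js -- sum the dependedUpon view for one dependency
var http = require('http')

var dep = 'underscore' // hypothetical package to look up
var path = '/registry/_design/app/_rewrite/-/_view/dependedUpon' +
    '?group_level=1' +
    '&startkey=' + encodeURIComponent(JSON.stringify([dep])) +
    '&endkey=' + encodeURIComponent(JSON.stringify([dep, {}]))

http.get({ host: 'localhost', port: 5984, path: path }, function (res) {
  var body = ''
  res.setEncoding('utf8')
  res.on('data', function (chunk) { body += chunk })
  res.on('end', function () {
    var rows = JSON.parse(body).rows || []
    // group_level=1 collapses the [dep, name, desc, readme] keys down to [dep],
    // so the single row's value is the _sum of emitted 1s: one per dependent package
    console.log(dep, 'is depended on by', rows.length ? rows[0].value : 0, 'packages')
  })
})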