Commit 964330ea authored by Medicean

Bump tar from 4.4.6 to 4.4.18

parent c07eb3e7
 {
   "name": "antsword",
-  "version": "2.1.13",
+  "version": "2.1.14",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
@@ -186,9 +186,9 @@
       }
     },
     "node_modules/chownr": {
-      "version": "1.0.1",
-      "resolved": "http://registry.npm.taobao.org/chownr/download/chownr-1.0.1.tgz",
-      "integrity": "sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE="
+      "version": "1.1.4",
+      "resolved": "https://registry.nlark.com/chownr/download/chownr-1.1.4.tgz",
+      "integrity": "sha1-b8nXtC0ypYNZYzdmbn0ICE2izGs="
     },
     "node_modules/co": {
       "version": "4.6.0",
@@ -600,11 +600,11 @@
       "integrity": "sha1-cPt8oCkO5v+WEJBBX0s989IIJlk="
     },
     "node_modules/fs-minipass": {
-      "version": "1.2.5",
-      "resolved": "http://registry.npm.taobao.org/fs-minipass/download/fs-minipass-1.2.5.tgz",
-      "integrity": "sha1-BsJ3IYRU7CiN93raVKA7hwKqy50=",
+      "version": "1.2.7",
+      "resolved": "https://registry.nlark.com/fs-minipass/download/fs-minipass-1.2.7.tgz",
+      "integrity": "sha1-zP+FcIQef+QmVpPaiJNsVa7X98c=",
       "dependencies": {
-        "minipass": "^2.2.1"
+        "minipass": "^2.6.0"
       }
     },
     "node_modules/ftp": {
@@ -1032,25 +1032,25 @@
       "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
     },
     "node_modules/minipass": {
-      "version": "2.3.4",
-      "resolved": "http://registry.npm.taobao.org/minipass/download/minipass-2.3.4.tgz",
-      "integrity": "sha1-R2jXYF7WGU1tV2FpueEu9x6dmVc=",
+      "version": "2.9.0",
+      "resolved": "https://registry.npmmirror.com/minipass/download/minipass-2.9.0.tgz",
+      "integrity": "sha1-5xN2Ln0+Mv7YAxFc+T4EvKn8yaY=",
       "dependencies": {
         "safe-buffer": "^5.1.2",
         "yallist": "^3.0.0"
       }
     },
     "node_modules/minipass/node_modules/yallist": {
-      "version": "3.0.2",
-      "resolved": "http://registry.npm.taobao.org/yallist/download/yallist-3.0.2.tgz",
-      "integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k="
+      "version": "3.1.1",
+      "resolved": "https://registry.nlark.com/yallist/download/yallist-3.1.1.tgz?cache=0&sync_timestamp=1624607893982&other_urls=https%3A%2F%2Fregistry.nlark.com%2Fyallist%2Fdownload%2Fyallist-3.1.1.tgz",
+      "integrity": "sha1-27fa+b/YusmrRev2ArjLrQ1dCP0="
     },
     "node_modules/minizlib": {
-      "version": "1.1.0",
-      "resolved": "http://registry.npm.taobao.org/minizlib/download/minizlib-1.1.0.tgz",
-      "integrity": "sha1-EeE2WM5GvDpwomeqxYNZ0eDCnOs=",
+      "version": "1.3.3",
+      "resolved": "https://registry.nlark.com/minizlib/download/minizlib-1.3.3.tgz",
+      "integrity": "sha1-IpDeloGKNMKVUcio0wEha9Zahh0=",
       "dependencies": {
-        "minipass": "^2.2.1"
+        "minipass": "^2.9.0"
       }
     },
     "node_modules/mkdirp": {
@@ -1554,26 +1554,47 @@
       "optional": true
     },
     "node_modules/tar": {
-      "version": "4.4.6",
-      "resolved": "http://registry.npm.taobao.org/tar/download/tar-4.4.6.tgz",
-      "integrity": "sha1-YxEPCcALTmCsi8/hvzyGYCNfvJs=",
+      "version": "4.4.19",
+      "resolved": "https://registry.nlark.com/tar/download/tar-4.4.19.tgz?cache=0&sync_timestamp=1629994977916&other_urls=https%3A%2F%2Fregistry.nlark.com%2Ftar%2Fdownload%2Ftar-4.4.19.tgz",
+      "integrity": "sha1-Lk1yY98m8rkU3uEMglqxMhI3QvM=",
       "dependencies": {
-        "chownr": "^1.0.1",
-        "fs-minipass": "^1.2.5",
-        "minipass": "^2.3.3",
-        "minizlib": "^1.1.0",
-        "mkdirp": "^0.5.0",
-        "safe-buffer": "^5.1.2",
-        "yallist": "^3.0.2"
+        "chownr": "^1.1.4",
+        "fs-minipass": "^1.2.7",
+        "minipass": "^2.9.0",
+        "minizlib": "^1.3.3",
+        "mkdirp": "^0.5.5",
+        "safe-buffer": "^5.2.1",
+        "yallist": "^3.1.1"
       },
       "engines": {
        "node": ">=4.5"
      }
     },
+    "node_modules/tar/node_modules/minimist": {
+      "version": "1.2.5",
+      "resolved": "https://registry.nlark.com/minimist/download/minimist-1.2.5.tgz?cache=0&sync_timestamp=1624607886507&other_urls=https%3A%2F%2Fregistry.nlark.com%2Fminimist%2Fdownload%2Fminimist-1.2.5.tgz",
+      "integrity": "sha1-Z9ZgFLZqaoqqDAg8X9WN9OTpdgI="
+    },
+    "node_modules/tar/node_modules/mkdirp": {
+      "version": "0.5.5",
+      "resolved": "https://registry.npmmirror.com/mkdirp/download/mkdirp-0.5.5.tgz",
+      "integrity": "sha1-2Rzv1i0UNsoPQWIOJRKI1CAJne8=",
+      "dependencies": {
+        "minimist": "^1.2.5"
+      },
+      "bin": {
+        "mkdirp": "bin/cmd.js"
+      }
+    },
+    "node_modules/tar/node_modules/safe-buffer": {
+      "version": "5.2.1",
+      "resolved": "https://registry.npm.taobao.org/safe-buffer/download/safe-buffer-5.2.1.tgz",
+      "integrity": "sha1-Hq+fqb2x/dTsdfWPnNtOa3gn7sY="
+    },
     "node_modules/tar/node_modules/yallist": {
-      "version": "3.0.2",
-      "resolved": "http://registry.npm.taobao.org/yallist/download/yallist-3.0.2.tgz",
-      "integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k="
+      "version": "3.1.1",
+      "resolved": "https://registry.nlark.com/yallist/download/yallist-3.1.1.tgz?cache=0&sync_timestamp=1624607893982&other_urls=https%3A%2F%2Fregistry.nlark.com%2Fyallist%2Fdownload%2Fyallist-3.1.1.tgz",
+      "integrity": "sha1-27fa+b/YusmrRev2ArjLrQ1dCP0="
    },
     "node_modules/through": {
       "version": "2.3.8",
...
module.exports = chownr 'use strict'
chownr.sync = chownrSync const fs = require('fs')
const path = require('path')
var fs = require("fs") /* istanbul ignore next */
, path = require("path") const LCHOWN = fs.lchown ? 'lchown' : 'chown'
/* istanbul ignore next */
function chownr (p, uid, gid, cb) { const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
fs.readdir(p, function (er, children) {
// any error other than ENOTDIR means it's not readable, or /* istanbul ignore next */
// doesn't exist. give up. const needEISDIRHandled = fs.lchown &&
if (er && er.code !== "ENOTDIR") return cb(er) !process.version.match(/v1[1-9]+\./) &&
if (er || !children.length) return fs.chown(p, uid, gid, cb) !process.version.match(/v10\.[6-9]/)
var len = children.length const lchownSync = (path, uid, gid) => {
, errState = null try {
children.forEach(function (child) { return fs[LCHOWNSYNC](path, uid, gid)
var pathChild = path.resolve(p, child); } catch (er) {
fs.lstat(pathChild, function(er, stats) { if (er.code !== 'ENOENT')
if (er) throw er
return cb(er) }
if (!stats.isSymbolicLink()) }
chownr(pathChild, uid, gid, then)
/* istanbul ignore next */
const chownSync = (path, uid, gid) => {
try {
return fs.chownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
/* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else else
then() fs.chown(path, uid, gid, cb)
}
: (_, __, ___, cb) => cb
/* istanbul ignore next */
const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
}
}
: (path, uid, gid) => lchownSync(path, uid, gid)
// fs.readdir could only accept an options object as of node v6
const nodeVersion = process.version
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
const chown = (cpath, uid, gid, cb) => {
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
// Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
}
const chownrKid = (p, child, uid, gid, cb) => {
if (typeof child === 'string')
return fs.lstat(path.resolve(p, child), (er, stats) => {
// Skip ENOENT error
if (er)
return cb(er.code !== 'ENOENT' ? er : null)
stats.name = child
chownrKid(p, stats, uid, gid, cb)
}) })
if (child.isDirectory()) {
chownr(path.resolve(p, child.name), uid, gid, er => {
if (er)
return cb(er)
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
}) })
function then (er) { } else {
if (errState) return const cpath = path.resolve(p, child.name)
if (er) return cb(errState = er) chown(cpath, uid, gid, cb)
if (-- len === 0) return fs.chown(p, uid, gid, cb)
} }
}
const chownr = (p, uid, gid, cb) => {
readdir(p, { withFileTypes: true }, (er, children) => {
// any error other than ENOTDIR or ENOTSUP means it's not readable,
// or doesn't exist. give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
}
if (er || !children.length)
return chown(p, uid, gid, cb)
let len = children.length
let errState = null
const then = er => {
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
children.forEach(child => chownrKid(p, child, uid, gid, then))
}) })
} }
function chownrSync (p, uid, gid) { const chownrKidSync = (p, child, uid, gid) => {
var children if (typeof child === 'string') {
try { try {
children = fs.readdirSync(p) const stats = fs.lstatSync(path.resolve(p, child))
stats.name = child
child = stats
} catch (er) { } catch (er) {
if (er && er.code === "ENOTDIR") return fs.chownSync(p, uid, gid) if (er.code === 'ENOENT')
return
else
throw er throw er
} }
if (!children.length) return fs.chownSync(p, uid, gid) }
children.forEach(function (child) { if (child.isDirectory())
var pathChild = path.resolve(p, child) chownrSync(path.resolve(p, child.name), uid, gid)
var stats = fs.lstatSync(pathChild)
if (!stats.isSymbolicLink()) handleEISDirSync(path.resolve(p, child.name), uid, gid)
chownrSync(pathChild, uid, gid) }
})
return fs.chownSync(p, uid, gid) const chownrSync = (p, uid, gid) => {
let children
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
}
if (children && children.length)
children.forEach(child => chownrKidSync(p, child, uid, gid))
return handleEISDirSync(p, uid, gid)
} }
module.exports = chownr
chownr.sync = chownrSync
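For reference, a minimal usage sketch of the rewritten chownr module above (the path and uid/gid values are placeholders, not taken from this commit):

```js
const chownr = require('chownr')

// Recursively change ownership, like `chown -R 1000:1000 /tmp/extracted`.
chownr('/tmp/extracted', 1000, 1000, er => {
  if (er)
    throw er
  console.log('ownership updated recursively')
})

// Synchronous variant, as used by tar's mkdir.js further down in this diff.
chownr.sync('/tmp/extracted', 1000, 1000)
```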
{ {
"_from": "chownr@^1.0.1", "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"_id": "chownr@1.0.1",
"_inBundle": false,
"_integrity": "sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE=",
"_location": "/chownr",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "chownr@^1.0.1",
"name": "chownr", "name": "chownr",
"escapedName": "chownr",
"rawSpec": "^1.0.1",
"saveSpec": null,
"fetchSpec": "^1.0.1"
},
"_requiredBy": [
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/chownr/download/chownr-1.0.1.tgz",
"_shasum": "e2a75042a9551908bebd25b8523d5f9769d79181",
"_spec": "chownr@^1.0.1",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/chownr/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "like `chown -R`", "description": "like `chown -R`",
"devDependencies": { "version": "1.1.4",
"mkdirp": "0.3", "repository": {
"rimraf": "", "type": "git",
"tap": "^1.2.0" "url": "git://github.com/isaacs/chownr.git"
}, },
"main": "chownr.js",
"files": [ "files": [
"chownr.js" "chownr.js"
], ],
"homepage": "https://github.com/isaacs/chownr#readme", "devDependencies": {
"license": "ISC", "mkdirp": "0.3",
"main": "chownr.js", "rimraf": "^2.7.1",
"name": "chownr", "tap": "^14.10.6"
"repository": { },
"type": "git", "tap": {
"url": "git://github.com/isaacs/chownr.git" "check-coverage": true
}, },
"scripts": { "scripts": {
"test": "tap test/*.js" "test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
}, },
"version": "1.0.1" "license": "ISC"
} }
@@ -6,7 +6,8 @@ const fs = require('fs')
 // for writev
 const binding = process.binding('fs')
 const writeBuffers = binding.writeBuffers
-const FSReqWrap = binding.FSReqWrap
+/* istanbul ignore next */
+const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
 
 const _autoClose = Symbol('_autoClose')
 const _close = Symbol('_close')
...
{ {
"_from": "fs-minipass@^1.2.5",
"_id": "fs-minipass@1.2.5",
"_inBundle": false,
"_integrity": "sha1-BsJ3IYRU7CiN93raVKA7hwKqy50=",
"_location": "/fs-minipass",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "fs-minipass@^1.2.5",
"name": "fs-minipass", "name": "fs-minipass",
"escapedName": "fs-minipass", "version": "1.2.7",
"rawSpec": "^1.2.5", "main": "index.js",
"saveSpec": null, "scripts": {
"fetchSpec": "^1.2.5" "test": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --follow-tags"
}, },
"_requiredBy": [ "keywords": [],
"/tar" "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
], "license": "ISC",
"_resolved": "http://registry.npm.taobao.org/fs-minipass/download/fs-minipass-1.2.5.tgz", "repository": {
"_shasum": "06c277218454ec288df77ada54a03b8702aacb9d", "type": "git",
"_spec": "fs-minipass@^1.2.5", "url": "git+https://github.com/npm/fs-minipass.git"
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
}, },
"bugs": { "bugs": {
"url": "https://github.com/npm/fs-minipass/issues" "url": "https://github.com/npm/fs-minipass/issues"
}, },
"bundleDependencies": false, "homepage": "https://github.com/npm/fs-minipass#readme",
"description": "fs read and write streams based on minipass",
"dependencies": { "dependencies": {
"minipass": "^2.2.1" "minipass": "^2.6.0"
}, },
"deprecated": false,
"description": "fs read and write streams based on minipass",
"devDependencies": { "devDependencies": {
"mutate-fs": "^2.0.1", "mutate-fs": "^2.0.1",
"tap": "^10.7.2" "tap": "^14.6.4"
}, },
"files": [ "files": [
"index.js" "index.js"
], ],
"homepage": "https://github.com/npm/fs-minipass#readme", "tap": {
"keywords": [], "check-coverage": true
"license": "ISC", }
"main": "index.js",
"name": "fs-minipass",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/fs-minipass.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J"
},
"version": "1.2.5"
} }
 'use strict'
-var Yallist = require('./yallist.js')
-
-Yallist.prototype[Symbol.iterator] = function* () {
-  for (let walker = this.head; walker; walker = walker.next) {
-    yield walker.value
-  }
-}
+module.exports = function (Yallist) {
+  Yallist.prototype[Symbol.iterator] = function* () {
+    for (let walker = this.head; walker; walker = walker.next) {
+      yield walker.value
+    }
+  }
+}
{ {
"_from": "yallist@^3.0.0",
"_id": "yallist@3.0.2",
"_inBundle": false,
"_integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
"_location": "/minipass/yallist",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "yallist@^3.0.0",
"name": "yallist", "name": "yallist",
"escapedName": "yallist", "version": "3.1.1",
"rawSpec": "^3.0.0",
"saveSpec": null,
"fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/minipass"
],
"_resolved": "http://registry.npm.taobao.org/yallist/download/yallist-3.0.2.tgz",
"_shasum": "8452b4bb7e83c7c188d8041c1a837c773d6d8bb9",
"_spec": "yallist@^3.0.0",
"_where": "/Users/medicean/workspace/antSword/node_modules/minipass",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/yallist/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Yet Another Linked List", "description": "Yet Another Linked List",
"devDependencies": { "main": "yallist.js",
"tap": "^10.3.0"
},
"directories": { "directories": {
"test": "test" "test": "test"
}, },
...@@ -44,19 +10,20 @@ ...@@ -44,19 +10,20 @@
"yallist.js", "yallist.js",
"iterator.js" "iterator.js"
], ],
"homepage": "https://github.com/isaacs/yallist#readme", "dependencies": {},
"license": "ISC", "devDependencies": {
"main": "yallist.js", "tap": "^12.1.0"
"name": "yallist",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
}, },
"scripts": { "scripts": {
"postpublish": "git push origin --all; git push origin --tags", "test": "tap test/*.js --100",
"postversion": "npm publish",
"preversion": "npm test", "preversion": "npm test",
"test": "tap test/*.js --100" "postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
}, },
"version": "3.0.2" "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC"
} }
@@ -54,6 +54,8 @@ Yallist.prototype.removeNode = function (node) {
   node.next = null
   node.prev = null
   node.list = null
+
+  return next
 }
 
 Yallist.prototype.unshiftNode = function (node) {
@@ -318,6 +320,37 @@ Yallist.prototype.sliceReverse = function (from, to) {
   return ret
 }
Yallist.prototype.splice = function (start, deleteCount /*, ...nodes */) {
if (start > this.length) {
start = this.length - 1
}
if (start < 0) {
start = this.length + start;
}
for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
walker = walker.next
}
var ret = []
for (var i = 0; walker && i < deleteCount; i++) {
ret.push(walker.value)
walker = this.removeNode(walker)
}
if (walker === null) {
walker = this.tail
}
if (walker !== this.head && walker !== this.tail) {
walker = walker.prev
}
for (var i = 2; i < arguments.length; i++) {
walker = insert(this, walker, arguments[i])
}
return ret;
}
 Yallist.prototype.reverse = function () {
   var head = this.head
   var tail = this.tail
@@ -331,6 +364,23 @@ Yallist.prototype.reverse = function () {
   return this
 }
function insert (self, node, value) {
var inserted = node === self.head ?
new Node(value, null, node, self) :
new Node(value, node, node.next, self)
if (inserted.next === null) {
self.tail = inserted
}
if (inserted.prev === null) {
self.head = inserted
}
self.length++
return inserted
}
 function push (self, item) {
   self.tail = new Node(item, self.tail, null, self)
   if (!self.head) {
@@ -371,6 +421,6 @@ function Node (value, prev, next, list) {
 }
 
 try {
-  // add if support or Symbol.iterator is present
-  require('./iterator.js')
+  // add if support for Symbol.iterator is present
+  require('./iterator.js')(Yallist)
 } catch (er) {}
{ {
"_from": "minipass@^2.3.3",
"_id": "minipass@2.3.4",
"_inBundle": false,
"_integrity": "sha1-R2jXYF7WGU1tV2FpueEu9x6dmVc=",
"_location": "/minipass",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minipass@^2.3.3",
"name": "minipass", "name": "minipass",
"escapedName": "minipass", "version": "2.9.0",
"rawSpec": "^2.3.3", "description": "minimal implementation of a PassThrough stream",
"saveSpec": null, "main": "index.js",
"fetchSpec": "^2.3.3"
},
"_requiredBy": [
"/fs-minipass",
"/minizlib",
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/minipass/download/minipass-2.3.4.tgz",
"_shasum": "4768d7605ed6194d6d576169b9e12ef71e9d9957",
"_spec": "minipass@^2.3.3",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/minipass/issues"
},
"bundleDependencies": false,
"dependencies": { "dependencies": {
"safe-buffer": "^5.1.2", "safe-buffer": "^5.1.2",
"yallist": "^3.0.0" "yallist": "^3.0.0"
}, },
"deprecated": false,
"description": "minimal implementation of a PassThrough stream",
"devDependencies": { "devDependencies": {
"end-of-stream": "^1.4.0", "end-of-stream": "^1.4.0",
"tap": "^12.0.1", "tap": "^14.6.5",
"through2": "^2.0.3" "through2": "^2.0.3"
}, },
"files": [ "scripts": {
"index.js" "test": "tap",
], "preversion": "npm test",
"homepage": "https://github.com/isaacs/minipass#readme", "postversion": "npm publish",
"postpublish": "git push origin --follow-tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minipass.git"
},
"keywords": [ "keywords": [
"passthrough", "passthrough",
"stream" "stream"
], ],
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC", "license": "ISC",
"main": "index.js", "files": [
"name": "minipass", "index.js"
"repository": { ],
"type": "git", "tap": {
"url": "git+https://github.com/isaacs/minipass.git" "check-coverage": true
}, }
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100"
},
"version": "2.3.4"
} }
 # minizlib
 
-A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
-and Node.js's zlib binding.
+A fast zlib stream built on [minipass](http://npm.im/minipass) and
+Node.js's zlib binding.
 
 This module was created to serve the needs of
-[node-tar](http://npm.im/tar) v2.  If your needs are different, then
-it may not be for you.
+[node-tar](http://npm.im/tar) and
+[minipass-fetch](http://npm.im/minipass-fetch).
+
+Brotli is supported in versions of node with a Brotli binding.
 
 ## How does this differ from the streams in `require('zlib')`?
 
 First, there are no convenience methods to compress or decompress a
 buffer.  If you want those, use the built-in `zlib` module.  This is
-only streams.
+only streams.  That being said, Minipass streams to make it fairly easy to
+use as one-liners: `new zlib.Deflate().end(data).read()` will return the
+deflate compressed result.
 
 This module compresses and decompresses the data as fast as you feed
 it in.  It is synchronous, and runs on the main process thread.  Zlib
-operations can be high CPU, but they're very fast, and doing it this
-way means much less bookkeeping and artificial deferral.
+and Brotli operations can be high CPU, but they're very fast, and doing it
+this way means much less bookkeeping and artificial deferral.
 
 Node's built in zlib streams are built on top of `stream.Transform`.
 They do the maximally safe thing with respect to consistent
 asynchrony, buffering, and backpressure.
 
-This module _does_ support backpressure, and will buffer output chunks
-that are not consumed, but is less of a mediator between the input and
-output.  There is no high or low watermarks, no state objects, and so
-artificial async deferrals.  It will not protect you from Zalgo.
-
-If you write, data will be emitted right away.  If you write
-everything synchronously in one tick, and you are listening to the
-`data` event to consume it, then it'll all be emitted right away in
-that same tick.  If you want data to be emitted in the next tick, then
-write it in the next tick.
-
-It is thus the responsibility of the reader and writer to manage their
-own consumption and process execution flow.
-
-The goal is to compress and decompress as fast as possible, even for
-files that are too large to store all in one buffer.
-
-The API is very similar to the built-in zlib module.  There are
-classes that you instantiate with `new` and they are streams that can
-be piped together.
+See [Minipass](http://npm.im/minipass) for more on the differences between
+Node.js core streams and Minipass streams, and the convenience methods
+provided by that class.
+
+## Classes
+
+- Deflate
+- Inflate
+- Gzip
+- Gunzip
+- DeflateRaw
+- InflateRaw
+- Unzip
+- BrotliCompress (Node v10 and higher)
+- BrotliDecompress (Node v10 and higher)
+
+## USAGE
+
+```js
+const zlib = require('minizlib')
+const input = sourceOfCompressedData()
+const decode = new zlib.BrotliDecompress()
+const output = whereToWriteTheDecodedData()
+input.pipe(decode).pipe(output)
+```
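In the same one-liner spirit as the `Deflate` example in the updated README, a Gzip round trip might look like the following. This is a minimal sketch (assuming minizlib 1.3.x is installed); because the streams run synchronously, the result is readable in the same tick:

```js
const zlib = require('minizlib')

const input = Buffer.from('hello, minizlib')
// end() returns the stream; since minizlib compresses synchronously on the
// main thread, the output is already buffered and available to read().
const gzipped = new zlib.Gzip().end(input).read()
const restored = new zlib.Gunzip().end(gzipped).read()

console.log(restored.toString()) // 'hello, minizlib'
```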
-module.exports = Object.freeze({
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+const realZlibConstants = require('zlib').constants ||
+  /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
+
+module.exports = Object.freeze(Object.assign(Object.create(null), {
Z_NO_FLUSH: 0, Z_NO_FLUSH: 0,
Z_PARTIAL_FLUSH: 1, Z_PARTIAL_FLUSH: 1,
Z_SYNC_FLUSH: 2, Z_SYNC_FLUSH: 2,
...@@ -23,7 +30,6 @@ module.exports = Object.freeze({ ...@@ -23,7 +30,6 @@ module.exports = Object.freeze({
Z_RLE: 3, Z_RLE: 3,
Z_FIXED: 4, Z_FIXED: 4,
Z_DEFAULT_STRATEGY: 0, Z_DEFAULT_STRATEGY: 0,
ZLIB_VERNUM: 4736,
DEFLATE: 1, DEFLATE: 1,
INFLATE: 2, INFLATE: 2,
GZIP: 3, GZIP: 3,
...@@ -31,6 +37,8 @@ module.exports = Object.freeze({ ...@@ -31,6 +37,8 @@ module.exports = Object.freeze({
DEFLATERAW: 5, DEFLATERAW: 5,
INFLATERAW: 6, INFLATERAW: 6,
UNZIP: 7, UNZIP: 7,
BROTLI_DECODE: 8,
BROTLI_ENCODE: 9,
Z_MIN_WINDOWBITS: 8, Z_MIN_WINDOWBITS: 8,
Z_MAX_WINDOWBITS: 15, Z_MAX_WINDOWBITS: 15,
Z_DEFAULT_WINDOWBITS: 15, Z_DEFAULT_WINDOWBITS: 15,
...@@ -42,5 +50,66 @@ module.exports = Object.freeze({ ...@@ -42,5 +50,66 @@ module.exports = Object.freeze({
Z_DEFAULT_MEMLEVEL: 8, Z_DEFAULT_MEMLEVEL: 8,
Z_MIN_LEVEL: -1, Z_MIN_LEVEL: -1,
Z_MAX_LEVEL: 9, Z_MAX_LEVEL: 9,
-  Z_DEFAULT_LEVEL: -1
-})
+  Z_DEFAULT_LEVEL: -1,
+  BROTLI_OPERATION_PROCESS: 0,
BROTLI_OPERATION_FLUSH: 1,
BROTLI_OPERATION_FINISH: 2,
BROTLI_OPERATION_EMIT_METADATA: 3,
BROTLI_MODE_GENERIC: 0,
BROTLI_MODE_TEXT: 1,
BROTLI_MODE_FONT: 2,
BROTLI_DEFAULT_MODE: 0,
BROTLI_MIN_QUALITY: 0,
BROTLI_MAX_QUALITY: 11,
BROTLI_DEFAULT_QUALITY: 11,
BROTLI_MIN_WINDOW_BITS: 10,
BROTLI_MAX_WINDOW_BITS: 24,
BROTLI_LARGE_MAX_WINDOW_BITS: 30,
BROTLI_DEFAULT_WINDOW: 22,
BROTLI_MIN_INPUT_BLOCK_BITS: 16,
BROTLI_MAX_INPUT_BLOCK_BITS: 24,
BROTLI_PARAM_MODE: 0,
BROTLI_PARAM_QUALITY: 1,
BROTLI_PARAM_LGWIN: 2,
BROTLI_PARAM_LGBLOCK: 3,
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
BROTLI_PARAM_SIZE_HINT: 5,
BROTLI_PARAM_LARGE_WINDOW: 6,
BROTLI_PARAM_NPOSTFIX: 7,
BROTLI_PARAM_NDIRECT: 8,
BROTLI_DECODER_RESULT_ERROR: 0,
BROTLI_DECODER_RESULT_SUCCESS: 1,
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
BROTLI_DECODER_NO_ERROR: 0,
BROTLI_DECODER_SUCCESS: 1,
BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))
{ {
"_from": "minizlib@^1.1.0",
"_id": "minizlib@1.1.0",
"_inBundle": false,
"_integrity": "sha1-EeE2WM5GvDpwomeqxYNZ0eDCnOs=",
"_location": "/minizlib",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minizlib@^1.1.0",
"name": "minizlib", "name": "minizlib",
"escapedName": "minizlib", "version": "1.3.3",
"rawSpec": "^1.1.0", "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
"saveSpec": null, "main": "index.js",
"fetchSpec": "^1.1.0"
},
"_requiredBy": [
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/minizlib/download/minizlib-1.1.0.tgz",
"_shasum": "11e13658ce46bc3a70a267aac58359d1e0c29ceb",
"_spec": "minizlib@^1.1.0",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/minizlib/issues"
},
"bundleDependencies": false,
"dependencies": { "dependencies": {
"minipass": "^2.2.1" "minipass": "^2.9.0"
}, },
"deprecated": false, "scripts": {
"description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", "test": "tap test/*.js --100 -J",
"devDependencies": { "preversion": "npm test",
"tap": "^10.7.2" "postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minizlib.git"
}, },
"files": [
"index.js",
"constants.js"
],
"homepage": "https://github.com/isaacs/minizlib#readme",
"keywords": [ "keywords": [
"zlib", "zlib",
"gzip", "gzip",
...@@ -54,18 +26,13 @@ ...@@ -54,18 +26,13 @@
"zip", "zip",
"unzip" "unzip"
], ],
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "MIT", "license": "MIT",
"main": "index.js", "devDependencies": {
"name": "minizlib", "tap": "^12.0.1"
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minizlib.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J"
}, },
"version": "1.1.0" "files": [
"index.js",
"constants.js"
]
} }
@@ -108,7 +108,7 @@ tar.c( // or tar.create
     gzip: <true|gzip options>
   },
   ['some', 'files', 'and', 'folders']
-).pipe(fs.createWriteStream('my-tarball.tgz')
+).pipe(fs.createWriteStream('my-tarball.tgz'))
 ```
 
 To replicate `tar xf my-tarball.tgz` you'd do:
...
@@ -6,6 +6,7 @@ const Unpack = require('./unpack.js')
 const fs = require('fs')
 const fsm = require('fs-minipass')
 const path = require('path')
+const stripSlash = require('./strip-trailing-slashes.js')
 
 const x = module.exports = (opt_, files, cb) => {
   if (typeof opt_ === 'function')
@@ -41,7 +42,7 @@ const x = module.exports = (opt_, files, cb) => {
 // construct a filter that limits the file entries listed
 // include child entries if a dir is included
 const filesFilter = (opt, files) => {
-  const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+  const map = new Map(files.map(f => [stripSlash(f), true]))
   const filter = opt.filter
 
   const mapHas = (file, r) => {
@@ -55,8 +56,8 @@ const filesFilter = (opt, files) => {
   }
 
   opt.filter = filter
-    ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
-    : file => mapHas(file.replace(/\/+$/, ''))
+    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
+    : file => mapHas(stripSlash(file))
 }
 
 const extractFileSync = opt => {
...
 'use strict'
 // Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.  The trailing byte in the
-// section will always be 0x20, or in some implementations 0x00.
-// this module encodes and decodes these things.
+// 0xff for negative, and 0x80 for positive.
 
 const encode = exports.encode = (num, buf) => {
-  buf[buf.length - 1] = 0x20
-  if (num < 0)
+  if (!Number.isSafeInteger(num))
+    // The number is so large that javascript cannot represent it with integer
+    // precision.
+    throw TypeError('cannot encode number outside of javascript safe integer range')
+  else if (num < 0)
     encodeNegative(num, buf)
   else
     encodePositive(num, buf)
@@ -15,35 +16,27 @@ const encode = exports.encode = (num, buf) => {
 
 const encodePositive = (num, buf) => {
   buf[0] = 0x80
-  for (var i = buf.length - 2; i > 0; i--) {
-    if (num === 0)
-      buf[i] = 0
-    else {
-      buf[i] = num % 0x100
-      num = Math.floor(num / 0x100)
-    }
-  }
+
+  for (var i = buf.length; i > 1; i--) {
+    buf[i-1] = num & 0xff
+    num = Math.floor(num / 0x100)
+  }
 }
 
 const encodeNegative = (num, buf) => {
   buf[0] = 0xff
   var flipped = false
   num = num * -1
-  for (var i = buf.length - 2; i > 0; i--) {
-    var byte
-    if (num === 0)
-      byte = 0
-    else {
-      byte = num % 0x100
-      num = Math.floor(num / 0x100)
-    }
+  for (var i = buf.length; i > 1; i--) {
+    var byte = num & 0xff
+    num = Math.floor(num / 0x100)
     if (flipped)
-      buf[i] = onesComp(byte)
+      buf[i-1] = onesComp(byte)
     else if (byte === 0)
-      buf[i] = 0
+      buf[i-1] = 0
     else {
       flipped = true
-      buf[i] = twosComp(byte)
+      buf[i-1] = twosComp(byte)
     }
   }
 }
@@ -51,8 +44,20 @@ const encodeNegative = (num, buf) => {
 const parse = exports.parse = (buf) => {
   var post = buf[buf.length - 1]
   var pre = buf[0]
-  return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
-    : twos(buf.slice(1, buf.length - 1))
+  var value;
+  if (pre === 0x80)
+    value = pos(buf.slice(1, buf.length))
+  else if (pre === 0xff)
+    value = twos(buf)
+  else
+    throw TypeError('invalid base256 encoding')
+
+  if (!Number.isSafeInteger(value))
+    // The number is so large that javascript cannot represent it with integer
+    // precision.
+    throw TypeError('parsed number outside of javascript safe integer range')
+
+  return value
 }
 
 const twos = (buf) => {
@@ -71,9 +76,9 @@ const twos = (buf) => {
       f = twosComp(byte)
     }
     if (f !== 0)
-      sum += f * Math.pow(256, len - i - 1)
+      sum -= f * Math.pow(256, len - i - 1)
   }
-  return sum * -1
+  return sum
 }
 
 const pos = (buf) => {
...
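The hunk above replaces the old modulo-based loops with masked byte loops and adds safe-integer guards on both encode and parse. As a standalone illustration (not part of the tar module, names are mine), here is how a positive value is laid out in the base-256 scheme described in the header comment: byte 0 is the 0x80 marker, and the remaining bytes hold the value in big-endian base 256.

```js
// Standalone sketch of the base-256 layout used by tar for numbers that do
// not fit the octal header fields.
const encodeBase256 = (num, size) => {
  const buf = Buffer.alloc(size)
  buf[0] = 0x80                      // positive-number marker
  for (let i = size; i > 1; i--) {
    buf[i - 1] = num & 0xff          // low byte goes at the end
    num = Math.floor(num / 0x100)    // shift right by one base-256 digit
  }
  return buf
}

// 8 GiB does not fit in the 11-digit octal size field of a tar header,
// so this representation is used instead.
console.log(encodeBase256(8 * 1024 ** 3, 12))
// <Buffer 80 00 00 00 00 00 00 02 00 00 00 00>
```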
@@ -11,6 +11,7 @@ const Parser = require('./parse.js')
 const fs = require('fs')
 const fsm = require('fs-minipass')
 const path = require('path')
+const stripSlash = require('./strip-trailing-slashes.js')
 
 const t = module.exports = (opt_, files, cb) => {
   if (typeof opt_ === 'function')
@@ -56,7 +57,7 @@ const onentryFunction = opt => {
 // construct a filter that limits the file entries listed
 // include child entries if a dir is included
 const filesFilter = (opt, files) => {
-  const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
+  const map = new Map(files.map(f => [stripSlash(f), true]))
   const filter = opt.filter
 
   const mapHas = (file, r) => {
@@ -70,8 +71,8 @@ const filesFilter = (opt, files) => {
   }
 
   opt.filter = filter
-    ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
-    : file => mapHas(file.replace(/\/+$/, ''))
+    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
+    : file => mapHas(stripSlash(file))
 }
 
 const listFileSync = opt => {
...
...@@ -8,6 +8,7 @@ const mkdirp = require('mkdirp') ...@@ -8,6 +8,7 @@ const mkdirp = require('mkdirp')
const fs = require('fs') const fs = require('fs')
const path = require('path') const path = require('path')
const chownr = require('chownr') const chownr = require('chownr')
const normPath = require('./normalize-windows-path.js')
class SymlinkError extends Error { class SymlinkError extends Error {
constructor (symlink, path) { constructor (symlink, path) {
...@@ -33,7 +34,20 @@ class CwdError extends Error { ...@@ -33,7 +34,20 @@ class CwdError extends Error {
} }
} }
const mkdir = module.exports = (dir, opt, cb) => { const cGet = (cache, key) => cache.get(normPath(key))
const cSet = (cache, key, val) => cache.set(normPath(key), val)
const checkCwd = (dir, cb) => {
fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
cb(er)
})
}
module.exports = (dir, opt, cb) => {
dir = normPath(dir)
// if there's any overlap between mask and mode, // if there's any overlap between mask and mode,
// then we'll need an explicit chmod // then we'll need an explicit chmod
const umask = opt.umask const umask = opt.umask
...@@ -49,13 +63,13 @@ const mkdir = module.exports = (dir, opt, cb) => { ...@@ -49,13 +63,13 @@ const mkdir = module.exports = (dir, opt, cb) => {
const preserve = opt.preserve const preserve = opt.preserve
const unlink = opt.unlink const unlink = opt.unlink
const cache = opt.cache const cache = opt.cache
const cwd = opt.cwd const cwd = normPath(opt.cwd)
const done = (er, created) => { const done = (er, created) => {
if (er) if (er)
cb(er) cb(er)
else { else {
cache.set(dir, true) cSet(cache, dir, true)
if (created && doChown) if (created && doChown)
chownr(created, uid, gid, er => done(er)) chownr(created, uid, gid, er => done(er))
else if (needChmod) else if (needChmod)
...@@ -65,21 +79,17 @@ const mkdir = module.exports = (dir, opt, cb) => { ...@@ -65,21 +79,17 @@ const mkdir = module.exports = (dir, opt, cb) => {
} }
} }
if (cache && cache.get(dir) === true) if (cache && cGet(cache, dir) === true)
return done() return done()
if (dir === cwd) if (dir === cwd)
return fs.lstat(dir, (er, st) => { return checkCwd(dir, done)
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
done(er)
})
if (preserve) if (preserve)
return mkdirp(dir, mode, done) return mkdirp(dir, mode, done)
const sub = path.relative(cwd, dir) const sub = normPath(path.relative(cwd, dir))
const parts = sub.split(/\/|\\/) const parts = sub.split('/')
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done) mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
} }
...@@ -87,22 +97,19 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { ...@@ -87,22 +97,19 @@ const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length) if (!parts.length)
return cb(null, created) return cb(null, created)
const p = parts.shift() const p = parts.shift()
const part = base + '/' + p const part = normPath(path.resolve(base + '/' + p))
if (cache.get(part)) if (cGet(cache, part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
} }
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
if (er) { if (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return cb(new CwdError(cwd, er.code))
fs.lstat(part, (statEr, st) => { fs.lstat(part, (statEr, st) => {
if (statEr) if (statEr) {
statEr.path = statEr.path && normPath(statEr.path)
cb(statEr) cb(statEr)
else if (st.isDirectory()) } else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink) else if (unlink)
fs.unlink(part, er => { fs.unlink(part, er => {
...@@ -121,7 +128,21 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { ...@@ -121,7 +128,21 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
} }
} }
const mkdirSync = module.exports.sync = (dir, opt) => { const checkCwdSync = dir => {
let ok = false
let code = 'ENOTDIR'
try {
ok = fs.statSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
}
module.exports.sync = (dir, opt) => {
dir = normPath(dir)
// if there's any overlap between mask and mode, // if there's any overlap between mask and mode,
// then we'll need an explicit chmod // then we'll need an explicit chmod
const umask = opt.umask const umask = opt.umask
...@@ -137,65 +158,51 @@ const mkdirSync = module.exports.sync = (dir, opt) => { ...@@ -137,65 +158,51 @@ const mkdirSync = module.exports.sync = (dir, opt) => {
const preserve = opt.preserve const preserve = opt.preserve
const unlink = opt.unlink const unlink = opt.unlink
const cache = opt.cache const cache = opt.cache
const cwd = opt.cwd const cwd = normPath(opt.cwd)
const done = (created) => { const done = (created) => {
cache.set(dir, true) cSet(cache, dir, true)
if (created && doChown) if (created && doChown)
chownr.sync(created, uid, gid) chownr.sync(created, uid, gid)
if (needChmod) if (needChmod)
fs.chmodSync(dir, mode) fs.chmodSync(dir, mode)
} }
if (cache && cache.get(dir) === true) if (cache && cGet(cache, dir) === true)
return done() return done()
if (dir === cwd) { if (dir === cwd) {
let ok = false checkCwdSync(cwd)
let code = 'ENOTDIR' return done()
try {
ok = fs.lstatSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
done()
return
} }
if (preserve) if (preserve)
return done(mkdirp.sync(dir, mode)) return done(mkdirp.sync(dir, mode))
const sub = path.relative(cwd, dir) const sub = normPath(path.relative(cwd, dir))
const parts = sub.split(/\/|\\/) const parts = sub.split('/')
let created = null let created = null
for (let p = parts.shift(), part = cwd; for (let p = parts.shift(), part = cwd;
p && (part += '/' + p); p && (part += '/' + p);
p = parts.shift()) { p = parts.shift()) {
part = normPath(path.resolve(part))
if (cache.get(part)) if (cGet(cache, part))
continue continue
try { try {
fs.mkdirSync(part, mode) fs.mkdirSync(part, mode)
created = created || part created = created || part
cache.set(part, true) cSet(cache, part, true)
} catch (er) { } catch (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return new CwdError(cwd, er.code)
const st = fs.lstatSync(part) const st = fs.lstatSync(part)
if (st.isDirectory()) { if (st.isDirectory()) {
cache.set(part, true) cSet(cache, part, true)
continue continue
} else if (unlink) { } else if (unlink) {
fs.unlinkSync(part) fs.unlinkSync(part)
fs.mkdirSync(part, mode) fs.mkdirSync(part, mode)
created = created || part created = created || part
cache.set(part, true) cSet(cache, part, true)
continue continue
} else if (st.isSymbolicLink()) } else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/')) return new SymlinkError(part, part + '/' + parts.join('/'))
......
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
: p => p && p.replace(/\\/g, '/')
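A minimal sketch of how this helper behaves, assuming it is saved locally as normalize-windows-path.js and loaded in a fresh process so the TESTING_TAR_FAKE_PLATFORM override is read before the platform check runs; on non-Windows platforms paths pass through untouched:

```js
// Force the win32 branch for demonstration (must be set before require()).
process.env.TESTING_TAR_FAKE_PLATFORM = 'win32'
const normPath = require('./normalize-windows-path.js')

console.log(normPath('C:\\Users\\me\\archive\\file.txt'))
// -> C:/Users/me/archive/file.txt
console.log(normPath('already/posix/style'))
// -> already/posix/style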
...@@ -56,6 +56,7 @@ const ONDRAIN = Symbol('ondrain') ...@@ -56,6 +56,7 @@ const ONDRAIN = Symbol('ondrain')
const fs = require('fs') const fs = require('fs')
const path = require('path') const path = require('path')
const warner = require('./warn-mixin.js') const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')
const Pack = warner(class Pack extends MiniPass { const Pack = warner(class Pack extends MiniPass {
constructor (opt) { constructor (opt) {
...@@ -67,7 +68,7 @@ const Pack = warner(class Pack extends MiniPass { ...@@ -67,7 +68,7 @@ const Pack = warner(class Pack extends MiniPass {
this.preservePaths = !!opt.preservePaths this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict this.strict = !!opt.strict
this.noPax = !!opt.noPax this.noPax = !!opt.noPax
this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '') this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map() this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map() this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map() this.readdirCache = opt.readdirCache || new Map()
...@@ -132,10 +133,7 @@ const Pack = warner(class Pack extends MiniPass { ...@@ -132,10 +133,7 @@ const Pack = warner(class Pack extends MiniPass {
} }
[ADDTARENTRY] (p) { [ADDTARENTRY] (p) {
const absolute = path.resolve(this.cwd, p.path) const absolute = normPath(path.resolve(this.cwd, p.path))
if (this.prefix)
p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
// in this case, we don't have to wait for the stat // in this case, we don't have to wait for the stat
if (!this.filter(p.path, p)) if (!this.filter(p.path, p))
p.resume() p.resume()
...@@ -151,10 +149,7 @@ const Pack = warner(class Pack extends MiniPass { ...@@ -151,10 +149,7 @@ const Pack = warner(class Pack extends MiniPass {
} }
[ADDFSENTRY] (p) { [ADDFSENTRY] (p) {
const absolute = path.resolve(this.cwd, p) const absolute = normPath(path.resolve(this.cwd, p))
if (this.prefix)
p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
this[QUEUE].push(new PackJob(p, absolute)) this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]() this[PROCESS]()
} }
...@@ -298,7 +293,8 @@ const Pack = warner(class Pack extends MiniPass { ...@@ -298,7 +293,8 @@ const Pack = warner(class Pack extends MiniPass {
linkCache: this.linkCache, linkCache: this.linkCache,
statCache: this.statCache, statCache: this.statCache,
noMtime: this.noMtime, noMtime: this.noMtime,
mtime: this.mtime mtime: this.mtime,
prefix: this.prefix,
} }
} }
...@@ -324,10 +320,7 @@ const Pack = warner(class Pack extends MiniPass { ...@@ -324,10 +320,7 @@ const Pack = warner(class Pack extends MiniPass {
if (job.readdir) if (job.readdir)
job.readdir.forEach(entry => { job.readdir.forEach(entry => {
const p = this.prefix ? const p = job.path
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/') const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry) this[ADDFSENTRY](base + entry)
}) })
...@@ -380,10 +373,7 @@ class PackSync extends Pack { ...@@ -380,10 +373,7 @@ class PackSync extends Pack {
if (job.readdir) if (job.readdir)
job.readdir.forEach(entry => { job.readdir.forEach(entry => {
const p = this.prefix ? const p = job.path
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/') const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry) this[ADDFSENTRY](base + entry)
}) })
......
@@ -29,6 +29,7 @@ const maxMetaEntrySize = 1024 * 1024
 const Entry = require('./read-entry.js')
 const Pax = require('./pax.js')
 const zlib = require('minizlib')
+const Buffer = require('./buffer.js')
 
 const gzipHeader = Buffer.from([0x1f, 0x8b])
 
 const STATE = Symbol('state')
@@ -101,7 +102,12 @@ module.exports = warner(class Parser extends EE {
   }
 
   [CONSUMEHEADER] (chunk, position) {
-    const header = new Header(chunk, position, this[EX], this[GEX])
+    let header
+    try {
+      header = new Header(chunk, position, this[EX], this[GEX])
+    } catch (er) {
+      return this.warn('invalid entry', er)
+    }
 
     if (header.nullBlock)
       this[EMIT]('nullBlock')
...
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
const assert = require('assert')
const normPath = require('./normalize-windows-path.js')
const stripSlashes = require('./strip-trailing-slashes.js')
const { join } = require('path')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = () => {
// path => [function or Set]
// A Set object means a directory reservation
// A fn is a direct reservation on that path
const queues = new Map()
// fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
// return a set of parent dirs for a given path
// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length)
path = normPath(join(set[set.length - 1], path))
set.push(path || '/')
return set
}, [])
return dirs
}
// functions currently running
const running = new Set()
// return the queues for each path the function cares about
// fn => {paths, dirs}
const getQueues = fn => {
const res = reservations.get(fn)
/* istanbul ignore if - unpossible */
if (!res)
throw new Error('function does not have any path reservations')
return {
paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
}
}
// check if fn is first in line for all its paths, and is
// included in the first set for all its dir queues
const check = fn => {
const {paths, dirs} = getQueues(fn)
return paths.every(q => q[0] === fn) &&
dirs.every(q => q[0] instanceof Set && q[0].has(fn))
}
// run the function if it's first in line and not already running
const run = fn => {
if (running.has(fn) || !check(fn))
return false
running.add(fn)
fn(() => clear(fn))
return true
}
const clear = fn => {
if (!running.has(fn))
return false
const { paths, dirs } = reservations.get(fn)
const next = new Set()
paths.forEach(path => {
const q = queues.get(path)
assert.equal(q[0], fn)
if (q.length === 1)
queues.delete(path)
else {
q.shift()
if (typeof q[0] === 'function')
next.add(q[0])
else
q[0].forEach(fn => next.add(fn))
}
})
dirs.forEach(dir => {
const q = queues.get(dir)
assert(q[0] instanceof Set)
if (q[0].size === 1 && q.length === 1) {
queues.delete(dir)
} else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused
next.add(q[0])
} else
q[0].delete(fn)
})
running.delete(fn)
next.forEach(fn => run(fn))
return true
}
const reserve = (paths, fn) => {
// collide on matches across case and unicode normalization
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
return stripSlashes(normPath(join(p)))
.normalize('NFKD')
.toLowerCase()
})
const dirs = new Set(
paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
)
reservations.set(fn, {dirs, paths})
paths.forEach(path => {
const q = queues.get(path)
if (!q)
queues.set(path, [fn])
else
q.push(fn)
})
dirs.forEach(dir => {
const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
else if (q[q.length-1] instanceof Set)
q[q.length-1].add(fn)
else
q.push(new Set([fn]))
})
return run(fn)
}
return { check, reserve }
}
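A rough usage sketch of the reservation system above (paths and timing are illustrative, not from this commit). Jobs that touch the same path are serialized; calling the supplied callback releases the reservation so the next queued job can run:

```js
const pathReservations = require('./path-reservations.js')
const reservations = pathReservations()

reservations.reserve(['/extract/dir/file.txt'], done => {
  console.log('first job owns /extract/dir/file.txt')
  setTimeout(done, 100) // simulate async work, then release
})

reservations.reserve(['/extract/dir/file.txt'], done => {
  console.log('second job runs only after the first calls done()')
  done()
})
```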
 'use strict'
 const types = require('./types.js')
 const MiniPass = require('minipass')
+const normPath = require('./normalize-windows-path.js')
 
 const SLURP = Symbol('slurp')
 module.exports = class ReadEntry extends MiniPass {
   constructor (header, ex, gex) {
     super()
+    // read entries always start life paused.  this is to avoid the
+    // situation where Minipass's auto-ending empty streams results
+    // in an entry ending before we're ready for it.
+    this.pause()
     this.extended = ex
     this.globalExtended = gex
     this.header = header
@@ -43,7 +48,7 @@ module.exports = class ReadEntry extends MiniPass {
       this.ignore = true
     }
 
-    this.path = header.path
+    this.path = normPath(header.path)
     this.mode = header.mode
     if (this.mode)
       this.mode = this.mode & 0o7777
@@ -55,7 +60,7 @@ module.exports = class ReadEntry extends MiniPass {
     this.mtime = header.mtime
     this.atime = header.atime
     this.ctime = header.ctime
-    this.linkpath = header.linkpath
+    this.linkpath = normPath(header.linkpath)
     this.uname = header.uname
     this.gname = header.gname
@@ -88,7 +93,7 @@ module.exports = class ReadEntry extends MiniPass {
       // a global extended header, because that's weird.
       if (ex[k] !== null && ex[k] !== undefined &&
           !(global && k === 'path'))
-        this[k] = ex[k]
+        this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
     }
   }
 }
...@@ -168,7 +168,8 @@ const replace = (opt, files, cb) => { ...@@ -168,7 +168,8 @@ const replace = (opt, files, cb) => {
fs.fstat(fd, (er, st) => { fs.fstat(fd, (er, st) => {
if (er) if (er)
return reject(er) return fs.close(fd, () => reject(er))
getPos(fd, st.size, (er, position) => { getPos(fd, st.size, (er, position) => {
if (er) if (er)
return reject(er) return reject(er)
......
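The `replace()` hunk above closes the file descriptor before rejecting when `fstat` fails, so the fd is not leaked. A standalone sketch of that pattern (the `statSize` name is illustrative, not from tar):
``` js
const fs = require('fs')
// On fstat error: release the descriptor first, then surface the error.
const statSize = fd => new Promise((resolve, reject) => {
  fs.fstat(fd, (er, st) => {
    if (er)
      return fs.close(fd, () => reject(er))
    resolve(st.size)
  })
})
```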
// unix absolute paths are also absolute on win32, so we use this for both
const { isAbsolute, parse } = require('path').win32
// returns [root, stripped]
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
module.exports = path => {
let r = ''
let parsed = parse(path)
while (isAbsolute(path) || parsed.root) {
// windows will think that //x/y/z has a "root" of //x/y/
// but strip the //?/C:/ off of //?/C:/path
const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
: parsed.root
path = path.substr(root.length)
r += root
parsed = parse(path)
}
return [r, path]
}
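A hedged usage sketch for the root-stripping helper above, assuming it is saved as `./strip-absolute-path.js`; the expected results follow the module's own comments:
``` js
const stripAbsolutePath = require('./strip-absolute-path.js')
console.log(stripAbsolutePath('/foo/bar'))  // [ '/', 'foo/bar' ]
console.log(stripAbsolutePath('//x/y/z/a')) // [ '//', 'x/y/z/a' ]
console.log(stripAbsolutePath('c:../foo'))  // [ 'c:', '../foo' ]
```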
// this is the only approach that was significantly faster than using
// str.replace(/\/+$/, '') for strings ending with a lot of / chars and
// containing multiple / chars.
const batchStrings = [
'/'.repeat(1024),
'/'.repeat(512),
'/'.repeat(256),
'/'.repeat(128),
'/'.repeat(64),
'/'.repeat(32),
'/'.repeat(16),
'/'.repeat(8),
'/'.repeat(4),
'/'.repeat(2),
'/',
]
module.exports = str => {
for (const s of batchStrings) {
while (str.length >= s.length && str.slice(-1 * s.length) === s)
str = str.slice(0, -1 * s.length)
}
return str
}
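Likewise, a quick sketch of the trailing-slash stripper above, assuming it is saved as `./strip-trailing-slashes.js`:
``` js
const stripTrailingSlashes = require('./strip-trailing-slashes.js')
console.log(stripTrailingSlashes('foo/bar///')) // 'foo/bar'
console.log(stripTrailingSlashes('///'))        // ''
console.log(stripTrailingSlashes('no-slash'))   // 'no-slash'
```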
../mkdirp/bin/cmd.js
\ No newline at end of file
language: node_js
node_js:
- "0.8"
- "0.10"
- "0.12"
- "iojs"
before_install:
- npm install -g npm@~1.4.6
This software is released under the MIT license:
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
var argv = require('../')(process.argv.slice(2));
console.log(argv);
module.exports = function (args, opts) {
if (!opts) opts = {};
var flags = { bools : {}, strings : {}, unknownFn: null };
if (typeof opts['unknown'] === 'function') {
flags.unknownFn = opts['unknown'];
}
if (typeof opts['boolean'] === 'boolean' && opts['boolean']) {
flags.allBools = true;
} else {
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
flags.bools[key] = true;
});
}
var aliases = {};
Object.keys(opts.alias || {}).forEach(function (key) {
aliases[key] = [].concat(opts.alias[key]);
aliases[key].forEach(function (x) {
aliases[x] = [key].concat(aliases[key].filter(function (y) {
return x !== y;
}));
});
});
[].concat(opts.string).filter(Boolean).forEach(function (key) {
flags.strings[key] = true;
if (aliases[key]) {
flags.strings[aliases[key]] = true;
}
});
var defaults = opts['default'] || {};
var argv = { _ : [] };
Object.keys(flags.bools).forEach(function (key) {
setArg(key, defaults[key] === undefined ? false : defaults[key]);
});
var notFlags = [];
if (args.indexOf('--') !== -1) {
notFlags = args.slice(args.indexOf('--')+1);
args = args.slice(0, args.indexOf('--'));
}
function argDefined(key, arg) {
return (flags.allBools && /^--[^=]+$/.test(arg)) ||
flags.strings[key] || flags.bools[key] || aliases[key];
}
function setArg (key, val, arg) {
if (arg && flags.unknownFn && !argDefined(key, arg)) {
if (flags.unknownFn(arg) === false) return;
}
var value = !flags.strings[key] && isNumber(val)
? Number(val) : val
;
setKey(argv, key.split('.'), value);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), value);
});
}
function setKey (obj, keys, value) {
var o = obj;
for (var i = 0; i < keys.length-1; i++) {
var key = keys[i];
if (key === '__proto__') return;
if (o[key] === undefined) o[key] = {};
if (o[key] === Object.prototype || o[key] === Number.prototype
|| o[key] === String.prototype) o[key] = {};
if (o[key] === Array.prototype) o[key] = [];
o = o[key];
}
var key = keys[keys.length - 1];
if (key === '__proto__') return;
if (o === Object.prototype || o === Number.prototype
|| o === String.prototype) o = {};
if (o === Array.prototype) o = [];
if (o[key] === undefined || flags.bools[key] || typeof o[key] === 'boolean') {
o[key] = value;
}
else if (Array.isArray(o[key])) {
o[key].push(value);
}
else {
o[key] = [ o[key], value ];
}
}
function aliasIsBoolean(key) {
return aliases[key].some(function (x) {
return flags.bools[x];
});
}
for (var i = 0; i < args.length; i++) {
var arg = args[i];
if (/^--.+=/.test(arg)) {
// Using [\s\S] instead of . because js doesn't support the
// 'dotall' regex modifier. See:
// http://stackoverflow.com/a/1068308/13216
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
var key = m[1];
var value = m[2];
if (flags.bools[key]) {
value = value !== 'false';
}
setArg(key, value, arg);
}
else if (/^--no-.+/.test(arg)) {
var key = arg.match(/^--no-(.+)/)[1];
setArg(key, false, arg);
}
else if (/^--.+/.test(arg)) {
var key = arg.match(/^--(.+)/)[1];
var next = args[i + 1];
if (next !== undefined && !/^-/.test(next)
&& !flags.bools[key]
&& !flags.allBools
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, next, arg);
i++;
}
else if (/^(true|false)$/.test(next)) {
setArg(key, next === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
else if (/^-[^-]+/.test(arg)) {
var letters = arg.slice(1,-1).split('');
var broken = false;
for (var j = 0; j < letters.length; j++) {
var next = arg.slice(j+2);
if (next === '-') {
setArg(letters[j], next, arg)
continue;
}
if (/[A-Za-z]/.test(letters[j]) && /=/.test(next)) {
setArg(letters[j], next.split('=')[1], arg);
broken = true;
break;
}
if (/[A-Za-z]/.test(letters[j])
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
setArg(letters[j], next, arg);
broken = true;
break;
}
if (letters[j+1] && letters[j+1].match(/\W/)) {
setArg(letters[j], arg.slice(j+2), arg);
broken = true;
break;
}
else {
setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg);
}
}
var key = arg.slice(-1)[0];
if (!broken && key !== '-') {
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
&& !flags.bools[key]
&& (aliases[key] ? !aliasIsBoolean(key) : true)) {
setArg(key, args[i+1], arg);
i++;
}
else if (args[i+1] && /^(true|false)$/.test(args[i+1])) {
setArg(key, args[i+1] === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
}
else {
if (!flags.unknownFn || flags.unknownFn(arg) !== false) {
argv._.push(
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
);
}
if (opts.stopEarly) {
argv._.push.apply(argv._, args.slice(i + 1));
break;
}
}
}
Object.keys(defaults).forEach(function (key) {
if (!hasKey(argv, key.split('.'))) {
setKey(argv, key.split('.'), defaults[key]);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), defaults[key]);
});
}
});
if (opts['--']) {
argv['--'] = new Array();
notFlags.forEach(function(key) {
argv['--'].push(key);
});
}
else {
notFlags.forEach(function(key) {
argv._.push(key);
});
}
return argv;
};
function hasKey (obj, keys) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
o = (o[key] || {});
});
var key = keys[keys.length - 1];
return key in o;
}
function isNumber (x) {
if (typeof x === 'number') return true;
if (/^0x[0-9a-f]+$/i.test(x)) return true;
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}
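Worth noting in this version: the `__proto__` guards in `setKey()` above silently drop prototype-pollution attempts. A hedged sketch, assuming the module is installed as `minimist`:
``` js
var parse = require('minimist');
var argv = parse(['--__proto__.polluted', 'yes']);
console.log(argv);         // { _: [] } -- the key is never set
console.log({}.polluted);  // undefined -- Object.prototype is untouched
```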
{
"name": "minimist",
"version": "1.2.5",
"description": "parse argument options",
"main": "index.js",
"devDependencies": {
"covert": "^1.0.0",
"tap": "~0.4.0",
"tape": "^3.5.0"
},
"scripts": {
"test": "tap test/*.js",
"coverage": "covert test/*.js"
},
"testling": {
"files": "test/*.js",
"browsers": [
"ie/6..latest",
"ff/5",
"firefox/latest",
"chrome/10",
"chrome/latest",
"safari/5.1",
"safari/latest",
"opera/12"
]
},
"repository": {
"type": "git",
"url": "git://github.com/substack/minimist.git"
},
"homepage": "https://github.com/substack/minimist",
"keywords": [
"argv",
"getopt",
"parser",
"optimist"
],
"author": {
"name": "James Halliday",
"email": "mail@substack.net",
"url": "http://substack.net"
},
"license": "MIT"
}
# minimist
parse argument options
This module is the guts of optimist's argument parser without all the
fanciful decoration.
# example
``` js
var argv = require('minimist')(process.argv.slice(2));
console.log(argv);
```
```
$ node example/parse.js -a beep -b boop
{ _: [], a: 'beep', b: 'boop' }
```
```
$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz
{ _: [ 'foo', 'bar', 'baz' ],
x: 3,
y: 4,
n: 5,
a: true,
b: true,
c: true,
beep: 'boop' }
```
# security
Previous versions had a prototype pollution bug that could cause privilege
escalation in some circumstances when handling untrusted user input.
Please use version 1.2.3 or later: https://snyk.io/vuln/SNYK-JS-MINIMIST-559764
# methods
``` js
var parseArgs = require('minimist')
```
## var argv = parseArgs(args, opts={})
Return an argument object `argv` populated with the array arguments from `args`.
`argv._` contains all the arguments that didn't have an option associated with
them.
Numeric-looking arguments will be returned as numbers unless `opts.string` or
`opts.boolean` is set for that argument name.
Any arguments after `'--'` will not be parsed and will end up in `argv._`.
options can be:
* `opts.string` - a string or array of strings of argument names to always treat
as strings
* `opts.boolean` - a boolean, string or array of strings to always treat as
booleans. If `true`, all double-hyphenated arguments without equals signs are
treated as booleans (e.g. this affects `--foo`, but not `-f` or `--foo=bar`)
* `opts.alias` - an object mapping string names to strings or arrays of string
argument names to use as aliases
* `opts.default` - an object mapping string argument names to default values
* `opts.stopEarly` - when true, populate `argv._` with everything after the
first non-option
* `opts['--']` - when true, populate `argv._` with everything before the `--`
and `argv['--']` with everything after the `--`. Here's an example:
```
> require('./')('one two three -- four five --six'.split(' '), { '--': true })
{ _: [ 'one', 'two', 'three' ],
'--': [ 'four', 'five', '--six' ] }
```
Note that with `opts['--']` set, parsing for arguments still stops after the
`--`.
* `opts.unknown` - a function which is invoked with a command line parameter not
defined in the `opts` configuration object. If the function returns `false`, the
unknown option is not added to `argv`.
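A hedged example of `opts.unknown` (the option names below are made up for illustration):
``` js
var parseArgs = require('minimist');
var argv = parseArgs(['--known', '1', '--mystery', '2'], {
    string: 'known',
    unknown: function (arg) { return false; } // drop anything not configured
});
console.log(argv); // { _: [], known: '1' }
```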
# install
With [npm](https://npmjs.org) do:
```
npm install minimist
```
# license
MIT
var parse = require('../');
var test = require('tape');
test('flag boolean true (default all --args to boolean)', function (t) {
var argv = parse(['moo', '--honk', 'cow'], {
boolean: true
});
t.deepEqual(argv, {
honk: true,
_: ['moo', 'cow']
});
t.deepEqual(typeof argv.honk, 'boolean');
t.end();
});
test('flag boolean true only affects double hyphen arguments without equals signs', function (t) {
var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], {
boolean: true
});
t.deepEqual(argv, {
honk: true,
tacos: 'good',
p: 55,
_: ['moo', 'cow']
});
t.deepEqual(typeof argv.honk, 'boolean');
t.end();
});
var parse = require('../');
var test = require('tape');
test('flag boolean default false', function (t) {
var argv = parse(['moo'], {
boolean: ['t', 'verbose'],
default: { verbose: false, t: false }
});
t.deepEqual(argv, {
verbose: false,
t: false,
_: ['moo']
});
t.deepEqual(typeof argv.verbose, 'boolean');
t.deepEqual(typeof argv.t, 'boolean');
t.end();
});
test('boolean groups', function (t) {
var argv = parse([ '-x', '-z', 'one', 'two', 'three' ], {
boolean: ['x','y','z']
});
t.deepEqual(argv, {
x : true,
y : false,
z : true,
_ : [ 'one', 'two', 'three' ]
});
t.deepEqual(typeof argv.x, 'boolean');
t.deepEqual(typeof argv.y, 'boolean');
t.deepEqual(typeof argv.z, 'boolean');
t.end();
});
test('boolean and alias with chainable api', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var opts = {
herp: { alias: 'h', boolean: true }
};
var aliasedArgv = parse(aliased, {
boolean: 'herp',
alias: { h: 'herp' }
});
var propertyArgv = parse(regular, {
boolean: 'herp',
alias: { h: 'herp' }
});
var expected = {
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
test('boolean and alias with options hash', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var opts = {
alias: { 'h': 'herp' },
boolean: 'herp'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var expected = {
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
test('boolean and alias array with options hash', function (t) {
var aliased = [ '-h', 'derp' ];
var regular = [ '--herp', 'derp' ];
var alt = [ '--harp', 'derp' ];
var opts = {
alias: { 'h': ['herp', 'harp'] },
boolean: 'h'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var altPropertyArgv = parse(alt, opts);
var expected = {
harp: true,
herp: true,
h: true,
'_': [ 'derp' ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.same(altPropertyArgv, expected);
t.end();
});
test('boolean and alias using explicit true', function (t) {
var aliased = [ '-h', 'true' ];
var regular = [ '--herp', 'true' ];
var opts = {
alias: { h: 'herp' },
boolean: 'h'
};
var aliasedArgv = parse(aliased, opts);
var propertyArgv = parse(regular, opts);
var expected = {
herp: true,
h: true,
'_': [ ]
};
t.same(aliasedArgv, expected);
t.same(propertyArgv, expected);
t.end();
});
// regression, see https://github.com/substack/node-optimist/issues/71
test('boolean and --x=true', function(t) {
var parsed = parse(['--boool', '--other=true'], {
boolean: 'boool'
});
t.same(parsed.boool, true);
t.same(parsed.other, 'true');
parsed = parse(['--boool', '--other=false'], {
boolean: 'boool'
});
t.same(parsed.boool, true);
t.same(parsed.other, 'false');
t.end();
});
test('boolean --boool=true', function (t) {
var parsed = parse(['--boool=true'], {
default: {
boool: false
},
boolean: ['boool']
});
t.same(parsed.boool, true);
t.end();
});
test('boolean --boool=false', function (t) {
var parsed = parse(['--boool=false'], {
default: {
boool: true
},
boolean: ['boool']
});
t.same(parsed.boool, false);
t.end();
});
test('boolean using something similar to true', function (t) {
var opts = { boolean: 'h' };
var result = parse(['-h', 'true.txt'], opts);
var expected = {
h: true,
'_': ['true.txt']
};
t.same(result, expected);
t.end();
});
\ No newline at end of file
var parse = require('../');
var test = require('tape');
test('-', function (t) {
t.plan(5);
t.deepEqual(parse([ '-n', '-' ]), { n: '-', _: [] });
t.deepEqual(parse([ '-' ]), { _: [ '-' ] });
t.deepEqual(parse([ '-f-' ]), { f: '-', _: [] });
t.deepEqual(
parse([ '-b', '-' ], { boolean: 'b' }),
{ b: true, _: [ '-' ] }
);
t.deepEqual(
parse([ '-s', '-' ], { string: 's' }),
{ s: '-', _: [] }
);
});
test('-a -- b', function (t) {
t.plan(3);
t.deepEqual(parse([ '-a', '--', 'b' ]), { a: true, _: [ 'b' ] });
t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
t.deepEqual(parse([ '--a', '--', 'b' ]), { a: true, _: [ 'b' ] });
});
test('move arguments after the -- into their own `--` array', function(t) {
t.plan(1);
t.deepEqual(
parse([ '--name', 'John', 'before', '--', 'after' ], { '--': true }),
{ name: 'John', _: [ 'before' ], '--': [ 'after' ] });
});
var test = require('tape');
var parse = require('../');
test('boolean default true', function (t) {
var argv = parse([], {
boolean: 'sometrue',
default: { sometrue: true }
});
t.equal(argv.sometrue, true);
t.end();
});
test('boolean default false', function (t) {
var argv = parse([], {
boolean: 'somefalse',
default: { somefalse: false }
});
t.equal(argv.somefalse, false);
t.end();
});
test('boolean default to null', function (t) {
var argv = parse([], {
boolean: 'maybe',
default: { maybe: null }
});
t.equal(argv.maybe, null);
var argv = parse(['--maybe'], {
boolean: 'maybe',
default: { maybe: null }
});
t.equal(argv.maybe, true);
t.end();
})
var parse = require('../');
var test = require('tape');
test('dotted alias', function (t) {
var argv = parse(['--a.b', '22'], {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
t.equal(argv.a.b, 22);
t.equal(argv.aa.bb, 22);
t.end();
});
test('dotted default', function (t) {
var argv = parse('', {default: {'a.b': 11}, alias: {'a.b': 'aa.bb'}});
t.equal(argv.a.b, 11);
t.equal(argv.aa.bb, 11);
t.end();
});
test('dotted default with no alias', function (t) {
var argv = parse('', {default: {'a.b': 11}});
t.equal(argv.a.b, 11);
t.end();
});
var parse = require('../');
var test = require('tape');
test('short -k=v' , function (t) {
t.plan(1);
var argv = parse([ '-b=123' ]);
t.deepEqual(argv, { b: 123, _: [] });
});
test('multi short -k=v' , function (t) {
t.plan(1);
var argv = parse([ '-a=whatever', '-b=robots' ]);
t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] });
});
var test = require('tape');
var parse = require('../');
test('long opts', function (t) {
t.deepEqual(
parse([ '--bool' ]),
{ bool : true, _ : [] },
'long boolean'
);
t.deepEqual(
parse([ '--pow', 'xixxle' ]),
{ pow : 'xixxle', _ : [] },
'long capture sp'
);
t.deepEqual(
parse([ '--pow=xixxle' ]),
{ pow : 'xixxle', _ : [] },
'long capture eq'
);
t.deepEqual(
parse([ '--host', 'localhost', '--port', '555' ]),
{ host : 'localhost', port : 555, _ : [] },
'long captures sp'
);
t.deepEqual(
parse([ '--host=localhost', '--port=555' ]),
{ host : 'localhost', port : 555, _ : [] },
'long captures eq'
);
t.end();
});
var parse = require('../');
var test = require('tape');
test('nums', function (t) {
var argv = parse([
'-x', '1234',
'-y', '5.67',
'-z', '1e7',
'-w', '10f',
'--hex', '0xdeadbeef',
'789'
]);
t.deepEqual(argv, {
x : 1234,
y : 5.67,
z : 1e7,
w : '10f',
hex : 0xdeadbeef,
_ : [ 789 ]
});
t.deepEqual(typeof argv.x, 'number');
t.deepEqual(typeof argv.y, 'number');
t.deepEqual(typeof argv.z, 'number');
t.deepEqual(typeof argv.w, 'string');
t.deepEqual(typeof argv.hex, 'number');
t.deepEqual(typeof argv._[0], 'number');
t.end();
});
test('already a number', function (t) {
var argv = parse([ '-x', 1234, 789 ]);
t.deepEqual(argv, { x : 1234, _ : [ 789 ] });
t.deepEqual(typeof argv.x, 'number');
t.deepEqual(typeof argv._[0], 'number');
t.end();
});
var parse = require('../');
var test = require('tape');
test('parse with modifier functions' , function (t) {
t.plan(1);
var argv = parse([ '-b', '123' ], { boolean: 'b' });
t.deepEqual(argv, { b: true, _: [123] });
});