Commit 891b7197 authored by Medicean

add thirdparty tar

parent dd7e6a95
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Like `chown -R`.
Takes the same arguments as `fs.chown()`
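For instance, a minimal usage sketch (not part of the original README; the path and uid/gid values are invented for illustration):

```js
const chownr = require('chownr')

// recursively chown a tree, like `chown -R 501:20 /tmp/some-dir`
chownr('/tmp/some-dir', 501, 20, er => {
  if (er) throw er
  console.log('done')
})

// or synchronously:
chownr.sync('/tmp/some-dir', 501, 20)
```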
module.exports = chownr
chownr.sync = chownrSync

var fs = require("fs")
  , path = require("path")

function chownr (p, uid, gid, cb) {
  fs.readdir(p, function (er, children) {
    // any error other than ENOTDIR means it's not readable, or
    // doesn't exist. give up.
    if (er && er.code !== "ENOTDIR") return cb(er)
    if (er || !children.length) return fs.chown(p, uid, gid, cb)

    var len = children.length
      , errState = null
    children.forEach(function (child) {
      var pathChild = path.resolve(p, child);
      fs.lstat(pathChild, function(er, stats) {
        if (er)
          return cb(er)
        if (!stats.isSymbolicLink())
          chownr(pathChild, uid, gid, then)
        else
          then()
      })
    })

    function then (er) {
      if (errState) return
      if (er) return cb(errState = er)
      if (-- len === 0) return fs.chown(p, uid, gid, cb)
    }
  })
}

function chownrSync (p, uid, gid) {
  var children
  try {
    children = fs.readdirSync(p)
  } catch (er) {
    if (er && er.code === "ENOTDIR") return fs.chownSync(p, uid, gid)
    throw er
  }
  if (!children.length) return fs.chownSync(p, uid, gid)

  children.forEach(function (child) {
    var pathChild = path.resolve(p, child)
    var stats = fs.lstatSync(pathChild)
    if (!stats.isSymbolicLink())
      chownrSync(pathChild, uid, gid)
  })
  return fs.chownSync(p, uid, gid)
}
{
"_from": "chownr@^1.0.1",
"_id": "chownr@1.0.1",
"_inBundle": false,
"_integrity": "sha1-4qdQQqlVGQi+vSW4Uj1fl2nXkYE=",
"_location": "/chownr",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "chownr@^1.0.1",
"name": "chownr",
"escapedName": "chownr",
"rawSpec": "^1.0.1",
"saveSpec": null,
"fetchSpec": "^1.0.1"
},
"_requiredBy": [
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/chownr/download/chownr-1.0.1.tgz",
"_shasum": "e2a75042a9551908bebd25b8523d5f9769d79181",
"_spec": "chownr@^1.0.1",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/chownr/issues"
},
"bundleDependencies": false,
"deprecated": false,
"description": "like `chown -R`",
"devDependencies": {
"mkdirp": "0.3",
"rimraf": "",
"tap": "^1.2.0"
},
"files": [
"chownr.js"
],
"homepage": "https://github.com/isaacs/chownr#readme",
"license": "ISC",
"main": "chownr.js",
"name": "chownr",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/chownr.git"
},
"scripts": {
"test": "tap test/*.js"
},
"version": "1.0.1"
}
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# fs-minipass
Filesystem streams based on [minipass](http://npm.im/minipass).
4 classes are exported:
- ReadStream
- ReadStreamSync
- WriteStream
- WriteStreamSync
When using `ReadStreamSync`, all of the data is made available
immediately upon consuming the stream. Nothing is buffered in memory
when the stream is constructed. If the stream is piped to a writer,
then it will synchronously `read()` and emit data into the writer as
fast as the writer can consume it. (That is, it will respect
backpressure.) If you call `stream.read()` then it will read the
entire file and return the contents.
When using `WriteStreamSync`, every write is flushed to the file
synchronously. If your writes all come in a single tick, then it'll
write it all out in a single tick. It's as synchronous as you are.
The async versions work much like their node builtin counterparts,
with the exception of introducing significantly less Stream machinery
overhead.
## USAGE
It's just streams, you pipe them or read() them or write() to them.
```js
const fsm = require('fs-minipass')
const readStream = new fsm.ReadStream('file.txt')
const writeStream = new fsm.WriteStream('output.txt')
writeStream.write('some file header or whatever\n')
readStream.pipe(writeStream)
```
## ReadStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `readSize` The size of reads to do, defaults to 16MB
- `size` The size of the file, if known. Prevents zero-byte read()
call at the end.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the file is done being read.
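A sketch of those options in use (not from the original README; the file name, descriptor reuse, and sizes are illustrative assumptions):

```js
const fs = require('fs')
const fsm = require('fs-minipass')

const fd = fs.openSync('file.txt', 'r')
const rs = new fsm.ReadStream('file.txt', {
  fd: fd,           // reuse the already-open descriptor
  readSize: 1024,   // 1kb reads instead of the 16MB default
  size: 4096,       // known size: skips the trailing zero-byte read()
  autoClose: false  // we close fd ourselves below
})
rs.on('data', chunk => console.log(chunk.length))
rs.on('end', () => fs.closeSync(fd))
```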
## WriteStream(path, options)
Path string is required, but somewhat irrelevant if an open file
descriptor is passed in as an option.
Options:
- `fd` Pass in a numeric file descriptor, if the file is already open.
- `mode` The mode to create the file with. Defaults to `0o666`.
- `start` The position in the file to start writing. If not
specified, then the file will start writing at position zero, and be
truncated by default.
- `autoClose` Set to `false` to prevent the file descriptor from being
closed when the stream is ended.
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
passed in, since the file won't be opened in that case. Defaults to
`'r+'` if `start` is specified (so existing contents are preserved), or
`'w'` otherwise.
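And a corresponding sketch for the writer (again illustrative; note how `start` switches the default flag to `'r+'` so the file is not truncated):

```js
const fsm = require('fs-minipass')

const ws = new fsm.WriteStream('output.txt', {
  start: 100,    // overwrite bytes beginning at offset 100
  mode: 0o644,   // an assumed, typical file mode
  autoClose: true
})
ws.write('patched region')
ws.end()
```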
'use strict'
const MiniPass = require('minipass')
const EE = require('events').EventEmitter
const fs = require('fs')

// for writev
const binding = process.binding('fs')
const writeBuffers = binding.writeBuffers
const FSReqWrap = binding.FSReqWrap

const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')

class ReadStream extends MiniPass {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)

    this.writable = false

    if (typeof path !== 'string')
      throw new TypeError('path must be a string')

    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_path] = path
    this[_readSize] = opt.readSize || 16*1024*1024
    this[_reading] = false
    this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
    this[_remain] = this[_size]
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    if (typeof this[_fd] === 'number')
      this[_read]()
    else
      this[_open]()
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }

  write () {
    throw new TypeError('this is a readable stream')
  }

  end () {
    throw new TypeError('this is a readable stream')
  }

  [_open] () {
    fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_read]()
    }
  }

  [_makeBuf] () {
    return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
  }

  [_read] () {
    if (!this[_reading]) {
      this[_reading] = true
      const buf = this[_makeBuf]()
      /* istanbul ignore if */
      if (buf.length === 0) return process.nextTick(() => this[_onread](null, 0, buf))
      fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
        this[_onread](er, br, buf))
    }
  }

  [_onread] (er, br, buf) {
    this[_reading] = false
    if (er)
      this[_onerror](er)
    else if (this[_handleChunk](br, buf))
      this[_read]()
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      fs.close(this[_fd], _ => this.emit('close'))
      this[_fd] = null
    }
  }

  [_onerror] (er) {
    this[_reading] = true
    this[_close]()
    this.emit('error', er)
  }

  [_handleChunk] (br, buf) {
    let ret = false
    // no effect if infinite
    this[_remain] -= br
    if (br > 0)
      ret = super.write(br < buf.length ? buf.slice(0, br) : buf)

    if (br === 0 || this[_remain] <= 0) {
      ret = false
      this[_close]()
      super.end()
    }

    return ret
  }

  emit (ev, data) {
    switch (ev) {
      case 'prefinish':
      case 'finish':
        break

      case 'drain':
        if (typeof this[_fd] === 'number')
          this[_read]()
        break

      default:
        return super.emit(ev, data)
    }
  }
}

class ReadStreamSync extends ReadStream {
  [_open] () {
    let threw = true
    try {
      this[_onopen](null, fs.openSync(this[_path], 'r'))
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_read] () {
    let threw = true
    try {
      if (!this[_reading]) {
        this[_reading] = true
        do {
          const buf = this[_makeBuf]()
          /* istanbul ignore next */
          const br = buf.length === 0 ? 0 : fs.readSync(this[_fd], buf, 0, buf.length, null)
          if (!this[_handleChunk](br, buf))
            break
        } while (true)
        this[_reading] = false
      }
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      try {
        fs.closeSync(this[_fd])
      } catch (er) {}
      this[_fd] = null
      this.emit('close')
    }
  }
}

class WriteStream extends EE {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)
    this.readable = false
    this[_writing] = false
    this[_ended] = false
    this[_needDrain] = false
    this[_queue] = []
    this[_path] = path
    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
    this[_pos] = typeof opt.start === 'number' ? opt.start : null
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    // truncating makes no sense when writing into the middle
    const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
    this[_defaultFlag] = opt.flags === undefined
    this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags

    if (this[_fd] === null)
      this[_open]()
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }

  [_onerror] (er) {
    this[_close]()
    this[_writing] = true
    this.emit('error', er)
  }

  [_open] () {
    fs.open(this[_path], this[_flags], this[_mode],
      (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (this[_defaultFlag] &&
        this[_flags] === 'r+' &&
        er && er.code === 'ENOENT') {
      this[_flags] = 'w'
      this[_open]()
    } else if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_flush]()
    }
  }

  end (buf, enc) {
    if (buf)
      this.write(buf, enc)

    this[_ended] = true

    // synthetic after-write logic, where drain/finish live
    if (!this[_writing] && !this[_queue].length &&
        typeof this[_fd] === 'number')
      this[_onwrite](null, 0)
  }

  write (buf, enc) {
    if (typeof buf === 'string')
      buf = new Buffer(buf, enc)

    if (this[_ended]) {
      this.emit('error', new Error('write() after end()'))
      return false
    }

    if (this[_fd] === null || this[_writing] || this[_queue].length) {
      this[_queue].push(buf)
      this[_needDrain] = true
      return false
    }

    this[_writing] = true
    this[_write](buf)
    return true
  }

  [_write] (buf) {
    fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
      this[_onwrite](er, bw))
  }

  [_onwrite] (er, bw) {
    if (er)
      this[_onerror](er)
    else {
      if (this[_pos] !== null)
        this[_pos] += bw
      if (this[_queue].length)
        this[_flush]()
      else {
        this[_writing] = false

        if (this[_ended] && !this[_finished]) {
          this[_finished] = true
          this[_close]()
          this.emit('finish')
        } else if (this[_needDrain]) {
          this[_needDrain] = false
          this.emit('drain')
        }
      }
    }
  }

  [_flush] () {
    if (this[_queue].length === 0) {
      if (this[_ended])
        this[_onwrite](null, 0)
    } else if (this[_queue].length === 1)
      this[_write](this[_queue].pop())
    else {
      const iovec = this[_queue]
      this[_queue] = []
      writev(this[_fd], iovec, this[_pos],
        (er, bw) => this[_onwrite](er, bw))
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      fs.close(this[_fd], _ => this.emit('close'))
      this[_fd] = null
    }
  }
}

class WriteStreamSync extends WriteStream {
  [_open] () {
    let fd
    try {
      fd = fs.openSync(this[_path], this[_flags], this[_mode])
    } catch (er) {
      if (this[_defaultFlag] &&
          this[_flags] === 'r+' &&
          er && er.code === 'ENOENT') {
        this[_flags] = 'w'
        return this[_open]()
      } else
        throw er
    }
    this[_onopen](null, fd)
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      try {
        fs.closeSync(this[_fd])
      } catch (er) {}
      this[_fd] = null
      this.emit('close')
    }
  }

  [_write] (buf) {
    try {
      this[_onwrite](null,
        fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
    } catch (er) {
      this[_onwrite](er, 0)
    }
  }
}

const writev = (fd, iovec, pos, cb) => {
  const done = (er, bw) => cb(er, bw, iovec)
  const req = new FSReqWrap()
  req.oncomplete = done
  binding.writeBuffers(fd, iovec, pos, req)
}

exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
{
"_from": "fs-minipass@^1.2.5",
"_id": "fs-minipass@1.2.5",
"_inBundle": false,
"_integrity": "sha1-BsJ3IYRU7CiN93raVKA7hwKqy50=",
"_location": "/fs-minipass",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "fs-minipass@^1.2.5",
"name": "fs-minipass",
"escapedName": "fs-minipass",
"rawSpec": "^1.2.5",
"saveSpec": null,
"fetchSpec": "^1.2.5"
},
"_requiredBy": [
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/fs-minipass/download/fs-minipass-1.2.5.tgz",
"_shasum": "06c277218454ec288df77ada54a03b8702aacb9d",
"_spec": "fs-minipass@^1.2.5",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/npm/fs-minipass/issues"
},
"bundleDependencies": false,
"dependencies": {
"minipass": "^2.2.1"
},
"deprecated": false,
"description": "fs read and write streams based on minipass",
"devDependencies": {
"mutate-fs": "^2.0.1",
"tap": "^10.7.2"
},
"files": [
"index.js"
],
"homepage": "https://github.com/npm/fs-minipass#readme",
"keywords": [],
"license": "ISC",
"main": "index.js",
"name": "fs-minipass",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/fs-minipass.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J"
},
"version": "1.2.5"
}
The ISC License
Copyright (c) npm, Inc. and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# minipass
A _very_ minimal implementation of a [PassThrough
stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough)
[It's very
fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0)
for objects, strings, and buffers.
Supports pipe()ing (including multi-pipe() and backpressure
transmission), buffering data until either a `data` event handler or
`pipe()` is added (so you don't lose the first chunk), and most other
cases where PassThrough is a good idea.
There is a `read()` method, but it's much more efficient to consume
data from this stream via `'data'` events or by calling `pipe()` into
some other stream. Calling `read()` requires the buffer to be
flattened in some cases, which requires copying memory.
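For example (a small sketch; note that the single `read()` call flattens both buffered chunks into one string):

```js
const MiniPass = require('minipass')
const mp = new MiniPass({ encoding: 'utf8' })
mp.write('foo')
mp.write('bar')
console.log(mp.read()) // 'foobar' -- two chunks flattened into one
console.log(mp.read()) // null -- nothing left to read
```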
There is also no `unpipe()` method. Once you start piping, there is
no stopping it!
If you set `objectMode: true` in the options, then whatever is written
will be emitted. Otherwise, it'll do a minimal amount of Buffer
copying to ensure proper Streams semantics when `read(n)` is called.
This is not a `through` or `through2` stream. It doesn't transform
the data, it just passes it right through. If you want to transform
the data, extend the class, and override the `write()` method. Once
you're done transforming the data however you want, call
`super.write()` with the transform output.
For an example of a stream that extends MiniPass to provide transform
capabilities, check out [minizlib](http://npm.im/minizlib).
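A bare-bones sketch of that pattern (this `Upper` class is invented for illustration, not part of the library):

```js
const MiniPass = require('minipass')

class Upper extends MiniPass {
  write (chunk, encoding, cb) {
    // transform, then hand the result to MiniPass's own write()
    return super.write(chunk.toString().toUpperCase(), 'utf8', cb)
  }
}

const up = new Upper({ encoding: 'utf8' })
up.on('data', c => console.log(c)) // 'HELLO'
up.write('hello')
up.end()
```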
## USAGE
```js
const MiniPass = require('minipass')
const mp = new MiniPass(options) // optional: { encoding }
mp.write('foo')
mp.pipe(someOtherStream)
mp.end('bar')
```
### collecting
```js
mp.collect().then(all => {
// all is an array of all the data emitted
// encoding is supported in this case, so
// so the result will be a collection of strings if
// an encoding is specified, or buffers/objects if not.
//
// In an async function, you may do
// const data = await stream.collect()
})
```
### iteration
You can iterate over streams synchronously or asynchronously in
platforms that support it.
Synchronous iteration will end when the currently available data is
consumed, even if the `end` event has not been reached. In string and
buffer mode, the data is concatenated, so unless multiple writes are
occurring in the same tick as the `read()`, sync iteration loops will
generally only have a single iteration.
To consume chunks in this way exactly as they have been written, with
no flattening, create the stream with the `{ objectMode: true }`
option.
```js
const mp = new Minipass({ objectMode: true })
mp.write('a')
mp.write('b')
for (let letter of mp) {
console.log(letter) // a, b
}
mp.write('c')
mp.write('d')
for (let letter of mp) {
console.log(letter) // c, d
}
mp.write('e')
mp.end()
for (let letter of mp) {
console.log(letter) // e
}
for (let letter of mp) {
console.log(letter) // nothing
}
```
Asynchronous iteration will continue until the end event is reached,
consuming all of the data.
```js
const mp = new Minipass({ encoding: 'utf8' })
// some source of some data
let i = 5
const inter = setInterval(() => {
if (i --> 0)
mp.write(Buffer.from('foo\n', 'utf8'))
else {
mp.end()
clearInterval(inter)
}
}, 100)
// consume the data with asynchronous iteration
async function consume () {
for await (let chunk of mp) {
console.log(chunk)
}
return 'ok'
}
consume().then(res => console.log(res))
// logs `foo\n` 5 times, and then `ok`
```
'use strict'
const EE = require('events')
const Yallist = require('yallist')
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const doIter = process.env._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator || Symbol('iterator not implemented')
const FLUSHCHUNK = Symbol('flushChunk')
const SD = require('string_decoder').StringDecoder
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')

// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
// .M, this is fine .\^/M..
let B = Buffer
/* istanbul ignore next */
if (!B.alloc) {
  B = require('safe-buffer').Buffer
}

module.exports = class MiniPass extends EE {
  constructor (options) {
    super()
    this[FLOWING] = false
    this.pipes = new Yallist()
    this.buffer = new Yallist()
    this[OBJECTMODE] = options && options.objectMode || false
    if (this[OBJECTMODE])
      this[ENCODING] = null
    else
      this[ENCODING] = options && options.encoding || null
    if (this[ENCODING] === 'buffer')
      this[ENCODING] = null
    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
    this[EOF] = false
    this[EMITTED_END] = false
    this[CLOSED] = false
    this.writable = true
    this.readable = true
    this[BUFFERLENGTH] = 0
  }

  get bufferLength () { return this[BUFFERLENGTH] }

  get encoding () { return this[ENCODING] }
  set encoding (enc) {
    if (this[OBJECTMODE])
      throw new Error('cannot set encoding in objectMode')

    if (this[ENCODING] && enc !== this[ENCODING] &&
        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
      throw new Error('cannot change encoding')

    if (this[ENCODING] !== enc) {
      this[DECODER] = enc ? new SD(enc) : null
      if (this.buffer.length)
        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
    }

    this[ENCODING] = enc
  }

  setEncoding (enc) {
    this.encoding = enc
  }

  write (chunk, encoding, cb) {
    if (this[EOF])
      throw new Error('write after end')

    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (!encoding)
      encoding = 'utf8'

    // fast-path writing strings of same encoding to a stream with
    // an empty buffer, skipping the buffer/decoder dance
    if (typeof chunk === 'string' && !this[OBJECTMODE] &&
        // unless it is a string already ready for us to use
        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
      chunk = B.from(chunk, encoding)
    }

    if (B.isBuffer(chunk) && this[ENCODING])
      chunk = this[DECODER].write(chunk)

    try {
      return this.flowing
        ? (this.emit('data', chunk), this.flowing)
        : (this[BUFFERPUSH](chunk), false)
    } finally {
      this.emit('readable')
      if (cb)
        cb()
    }
  }

  read (n) {
    try {
      if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH])
        return null

      if (this[OBJECTMODE])
        n = null

      if (this.buffer.length > 1 && !this[OBJECTMODE]) {
        if (this.encoding)
          this.buffer = new Yallist([
            Array.from(this.buffer).join('')
          ])
        else
          this.buffer = new Yallist([
            B.concat(Array.from(this.buffer), this[BUFFERLENGTH])
          ])
      }

      return this[READ](n || null, this.buffer.head.value)
    } finally {
      this[MAYBE_EMIT_END]()
    }
  }

  [READ] (n, chunk) {
    if (n === chunk.length || n === null)
      this[BUFFERSHIFT]()
    else {
      this.buffer.head.value = chunk.slice(n)
      chunk = chunk.slice(0, n)
      this[BUFFERLENGTH] -= n
    }

    this.emit('data', chunk)

    if (!this.buffer.length && !this[EOF])
      this.emit('drain')

    return chunk
  }

  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    if (cb)
      this.once('end', cb)
    this[EOF] = true
    this.writable = false
    if (this.flowing)
      this[MAYBE_EMIT_END]()
  }

  // don't let the internal resume be overwritten
  [RESUME] () {
    this[FLOWING] = true
    this.emit('resume')
    if (this.buffer.length)
      this[FLUSH]()
    else if (this[EOF])
      this[MAYBE_EMIT_END]()
    else
      this.emit('drain')
  }

  resume () {
    return this[RESUME]()
  }

  pause () {
    this[FLOWING] = false
  }

  get flowing () {
    return this[FLOWING]
  }

  [BUFFERPUSH] (chunk) {
    if (this[OBJECTMODE])
      this[BUFFERLENGTH] += 1
    else
      this[BUFFERLENGTH] += chunk.length
    return this.buffer.push(chunk)
  }

  [BUFFERSHIFT] () {
    if (this.buffer.length) {
      if (this[OBJECTMODE])
        this[BUFFERLENGTH] -= 1
      else
        this[BUFFERLENGTH] -= this.buffer.head.value.length
    }
    return this.buffer.shift()
  }

  [FLUSH] () {
    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))

    if (!this.buffer.length && !this[EOF])
      this.emit('drain')
  }

  [FLUSHCHUNK] (chunk) {
    return chunk ? (this.emit('data', chunk), this.flowing) : false
  }

  pipe (dest, opts) {
    if (dest === process.stdout || dest === process.stderr)
      (opts = opts || {}).end = false
    const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() }
    this.pipes.push(p)

    dest.on('drain', p.ondrain)
    this[RESUME]()
    return dest
  }

  addListener (ev, fn) {
    return this.on(ev, fn)
  }

  on (ev, fn) {
    try {
      return super.on(ev, fn)
    } finally {
      if (ev === 'data' && !this.pipes.length && !this.flowing)
        this[RESUME]()
      else if (ev === 'end' && this[EMITTED_END]) {
        super.emit('end')
        this.removeAllListeners('end')
      }
    }
  }

  get emittedEnd () {
    return this[EMITTED_END]
  }

  [MAYBE_EMIT_END] () {
    if (!this[EMITTED_END] && this.buffer.length === 0 && this[EOF]) {
      this.emit('end')
      this.emit('prefinish')
      this.emit('finish')
      if (this[CLOSED])
        this.emit('close')
    }
  }

  emit (ev, data) {
    if (ev === 'data') {
      if (!data)
        return
      if (this.pipes.length)
        this.pipes.forEach(p => p.dest.write(data) || this.pause())
    } else if (ev === 'end') {
      if (this[EMITTED_END] === true)
        return

      this[EMITTED_END] = true
      this.readable = false

      if (this[DECODER]) {
        data = this[DECODER].end()
        if (data) {
          this.pipes.forEach(p => p.dest.write(data))
          super.emit('data', data)
        }
      }

      this.pipes.forEach(p => {
        p.dest.removeListener('drain', p.ondrain)
        if (!p.opts || p.opts.end !== false)
          p.dest.end()
      })
    } else if (ev === 'close') {
      this[CLOSED] = true
      // don't emit close before 'end' and 'finish'
      if (!this[EMITTED_END])
        return
    }

    const args = new Array(arguments.length)
    args[0] = ev
    args[1] = data
    if (arguments.length > 2) {
      for (let i = 2; i < arguments.length; i++) {
        args[i] = arguments[i]
      }
    }

    try {
      return super.emit.apply(this, args)
    } finally {
      if (ev !== 'end')
        this[MAYBE_EMIT_END]()
      else
        this.removeAllListeners('end')
    }
  }

  // const all = await stream.collect()
  collect () {
    return new Promise((resolve, reject) => {
      const buf = []
      this.on('data', c => buf.push(c))
      this.on('end', () => resolve(buf))
      this.on('error', reject)
    })
  }

  // for await (let chunk of stream)
  [ASYNCITERATOR] () {
    const next = () => {
      const res = this.read()
      if (res !== null)
        return Promise.resolve({ done: false, value: res })

      if (this[EOF])
        return Promise.resolve({ done: true })

      let resolve = null
      let reject = null
      const onerr = er => {
        this.removeListener('data', ondata)
        this.removeListener('end', onend)
        reject(er)
      }
      const ondata = value => {
        this.removeListener('error', onerr)
        this.removeListener('end', onend)
        this.pause()
        resolve({ value: value, done: !!this[EOF] })
      }
      const onend = () => {
        this.removeListener('error', onerr)
        this.removeListener('data', ondata)
        resolve({ done: true })
      }
      return new Promise((res, rej) => {
        reject = rej
        resolve = res
        this.once('error', onerr)
        this.once('end', onend)
        this.once('data', ondata)
        this.resume()
      })
    }

    return { next }
  }

  // for (let chunk of stream)
  [ITERATOR] () {
    const next = () => {
      const value = this.read()
      const done = value === null
      return { value, done }
    }
    return { next }
  }
}
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# yallist
Yet Another Linked List
There are many doubly-linked list implementations like it, but this
one is mine.
For when an array would be too big, and a Map can't be iterated in
reverse order.
[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
## basic usage
```javascript
var yallist = require('yallist')
var myList = yallist.create([1, 2, 3])
myList.push('foo')
myList.unshift('bar')
// of course pop() and shift() are there, too
console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
myList.forEach(function (k) {
// walk the list head to tail
})
myList.forEachReverse(function (k, index, list) {
// walk the list tail to head
})
var myDoubledList = myList.map(function (k) {
return k + k
})
// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
// mapReverse is also a thing
var myDoubledListReverse = myList.mapReverse(function (k) {
return k + k
}) // ['foofoo', 6, 4, 2, 'barbar']
var reduced = myList.reduce(function (set, entry) {
set += entry
return set
}, 'start')
console.log(reduced) // 'startfoo123bar'
```
## api
The whole API is considered "public".
Functions with the same name as an Array method work more or less the
same way.
There are reverse versions of most things because that's the point.
### Yallist
Default export, the class that holds and manages a list.
Call it with either a forEach-able (like an array) or a set of
arguments, to initialize the list.
The Array-ish methods all act like you'd expect. No magic length,
though, so if you change that it won't automatically prune or add
empty spots.
### Yallist.create(..)
Alias for Yallist function. Some people like factories.
#### yallist.head
The first node in the list
#### yallist.tail
The last node in the list
#### yallist.length
The number of nodes in the list. (Change this at your peril. It is
not magic like Array length.)
#### yallist.toArray()
Convert the list to an array.
#### yallist.forEach(fn, [thisp])
Call a function on each item in the list.
#### yallist.forEachReverse(fn, [thisp])
Call a function on each item in the list, in reverse order.
#### yallist.get(n)
Get the data at position `n` in the list. If you use this a lot,
probably better off just using an Array.
#### yallist.getReverse(n)
Get the data at position `n`, counting from the tail.
#### yallist.map(fn, thisp)
Create a new Yallist with the result of calling the function on each
item.
#### yallist.mapReverse(fn, thisp)
Same as `map`, but in reverse.
#### yallist.pop()
Get the data from the list tail, and remove the tail from the list.
#### yallist.push(item, ...)
Insert one or more items to the tail of the list.
#### yallist.reduce(fn, initialValue)
Like Array.reduce.
#### yallist.reduceReverse
Like Array.reduce, but in reverse.
#### yallist.reverse
Reverse the list in place.
#### yallist.shift()
Get the data from the list head, and remove the head from the list.
#### yallist.slice([from], [to])
Just like Array.slice, but returns a new Yallist.
#### yallist.sliceReverse([from], [to])
Just like yallist.slice, but the result is returned in reverse.
#### yallist.toArray()
Create an array representation of the list.
#### yallist.toArrayReverse()
Create a reversed array representation of the list.
#### yallist.unshift(item, ...)
Insert one or more items to the head of the list.
#### yallist.unshiftNode(node)
Move a Node object to the front of the list. (That is, pull it out of
wherever it lives, and make it the new head.)
If the node belongs to a different list, then that list will remove it
first.
#### yallist.pushNode(node)
Move a Node object to the end of the list. (That is, pull it out of
wherever it lives, and make it the new tail.)
If the node belongs to a list already, then that list will remove it
first.
#### yallist.removeNode(node)
Remove a node from the list, preserving referential integrity of head
and tail and other nodes.
Will throw an error if you try to have a list remove a node that
doesn't belong to it.
### Yallist.Node
The class that holds the data and is actually the list.
Call with `var n = new Node(value, previousNode, nextNode)`
Note that if you do direct operations on Nodes themselves, it's very
easy to get into weird states where the list is broken. Be careful :)
#### node.next
The next node in the list.
#### node.prev
The previous node in the list.
#### node.value
The data the node contains.
#### node.list
The list to which this node belongs. (Null if it does not belong to
any list.)
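For example, a short sketch of direct node manipulation (the values here are invented):

```js
var Yallist = require('yallist')

var list = Yallist.create(['a', 'b', 'c'])
var node = list.head.next       // the node holding 'b'
console.log(node.value)         // 'b'
console.log(node.list === list) // true

// move it to the front; the list unlinks it from its old spot first
list.unshiftNode(node)
console.log(list.toArray())     // ['b', 'a', 'c']
```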
'use strict'
var Yallist = require('./yallist.js')
Yallist.prototype[Symbol.iterator] = function* () {
  for (let walker = this.head; walker; walker = walker.next) {
    yield walker.value
  }
}
{
"_from": "yallist@^3.0.0",
"_id": "yallist@3.0.2",
"_inBundle": false,
"_integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
"_location": "/minipass/yallist",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "yallist@^3.0.0",
"name": "yallist",
"escapedName": "yallist",
"rawSpec": "^3.0.0",
"saveSpec": null,
"fetchSpec": "^3.0.0"
},
"_requiredBy": [
"/minipass"
],
"_resolved": "http://registry.npm.taobao.org/yallist/download/yallist-3.0.2.tgz",
"_shasum": "8452b4bb7e83c7c188d8041c1a837c773d6d8bb9",
"_spec": "yallist@^3.0.0",
"_where": "/Users/medicean/workspace/antSword/node_modules/minipass",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/yallist/issues"
},
"bundleDependencies": false,
"dependencies": {},
"deprecated": false,
"description": "Yet Another Linked List",
"devDependencies": {
"tap": "^10.3.0"
},
"directories": {
"test": "test"
},
"files": [
"yallist.js",
"iterator.js"
],
"homepage": "https://github.com/isaacs/yallist#readme",
"license": "ISC",
"main": "yallist.js",
"name": "yallist",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100"
},
"version": "3.0.2"
}
'use strict'
module.exports = Yallist

Yallist.Node = Node
Yallist.create = Yallist

function Yallist (list) {
  var self = this
  if (!(self instanceof Yallist)) {
    self = new Yallist()
  }

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    for (var i = 0, l = arguments.length; i < l; i++) {
      self.push(arguments[i])
    }
  }

  return self
}

Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  var next = node.next
  var prev = node.prev

  if (next) {
    next.prev = prev
  }

  if (prev) {
    prev.next = next
  }

  if (node === this.head) {
    this.head = next
  }
  if (node === this.tail) {
    this.tail = prev
  }

  node.list.length--
  node.next = null
  node.prev = null
  node.list = null
}

Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var head = this.head
  node.list = this
  node.next = head
  if (head) {
    head.prev = node
  }

  this.head = node
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}

Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  var tail = this.tail
  node.list = this
  node.prev = tail
  if (tail) {
    tail.next = node
  }

  this.tail = node
  if (!this.head) {
    this.head = node
  }
  this.length++
}

Yallist.prototype.push = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    push(this, arguments[i])
  }
  return this.length
}

Yallist.prototype.unshift = function () {
  for (var i = 0, l = arguments.length; i < l; i++) {
    unshift(this, arguments[i])
  }
  return this.length
}

Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  var res = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    this.head = null
  }
  this.length--
  return res
}

Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  var res = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    this.tail = null
  }
  this.length--
  return res
}

Yallist.prototype.forEach = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.head, i = 0; walker !== null; i++) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.next
  }
}

Yallist.prototype.forEachReverse = function (fn, thisp) {
  thisp = thisp || this
  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
    fn.call(thisp, walker.value, i, this)
    walker = walker.prev
  }
}

Yallist.prototype.get = function (n) {
  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.next
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}

Yallist.prototype.getReverse = function (n) {
  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
    // abort out of the list early if we hit a cycle
    walker = walker.prev
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}

Yallist.prototype.map = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.head; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.next
  }
  return res
}

Yallist.prototype.mapReverse = function (fn, thisp) {
  thisp = thisp || this
  var res = new Yallist()
  for (var walker = this.tail; walker !== null;) {
    res.push(fn.call(thisp, walker.value, this))
    walker = walker.prev
  }
  return res
}

Yallist.prototype.reduce = function (fn, initial) {
  var acc
  var walker = this.head
  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  for (var i = 0; walker !== null; i++) {
    acc = fn(acc, walker.value, i)
    walker = walker.next
  }

  return acc
}

Yallist.prototype.reduceReverse = function (fn, initial) {
  var acc
  var walker = this.tail
  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  for (var i = this.length - 1; walker !== null; i--) {
    acc = fn(acc, walker.value, i)
    walker = walker.prev
  }

  return acc
}

Yallist.prototype.toArray = function () {
  var arr = new Array(this.length)
  for (var i = 0, walker = this.head; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.next
  }
  return arr
}

Yallist.prototype.toArrayReverse = function () {
  var arr = new Array(this.length)
  for (var i = 0, walker = this.tail; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.prev
  }
  return arr
}

Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}

Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}

Yallist.prototype.reverse = function () {
  var head = this.head
  var tail = this.tail
  for (var walker = head; walker !== null; walker = walker.prev) {
    var p = walker.prev
    walker.prev = walker.next
    walker.next = p
  }
  this.head = tail
  this.tail = head
  return this
}

function push (self, item) {
  self.tail = new Node(item, self.tail, null, self)
  if (!self.head) {
    self.head = self.tail
  }
  self.length++
}

function unshift (self, item) {
  self.head = new Node(item, null, self.head, self)
  if (!self.tail) {
    self.tail = self.head
  }
  self.length++
}

function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  if (prev) {
    prev.next = this
    this.prev = prev
  } else {
    this.prev = null
  }

  if (next) {
    next.prev = this
    this.next = next
  } else {
    this.next = null
  }
}
try {
  // add if support for Symbol.iterator is present
  require('./iterator.js')
} catch (er) {}
{
"_from": "minipass@^2.3.3",
"_id": "minipass@2.3.4",
"_inBundle": false,
"_integrity": "sha1-R2jXYF7WGU1tV2FpueEu9x6dmVc=",
"_location": "/minipass",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minipass@^2.3.3",
"name": "minipass",
"escapedName": "minipass",
"rawSpec": "^2.3.3",
"saveSpec": null,
"fetchSpec": "^2.3.3"
},
"_requiredBy": [
"/fs-minipass",
"/minizlib",
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/minipass/download/minipass-2.3.4.tgz",
"_shasum": "4768d7605ed6194d6d576169b9e12ef71e9d9957",
"_spec": "minipass@^2.3.3",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/minipass/issues"
},
"bundleDependencies": false,
"dependencies": {
"safe-buffer": "^5.1.2",
"yallist": "^3.0.0"
},
"deprecated": false,
"description": "minimal implementation of a PassThrough stream",
"devDependencies": {
"end-of-stream": "^1.4.0",
"tap": "^12.0.1",
"through2": "^2.0.3"
},
"files": [
"index.js"
],
"homepage": "https://github.com/isaacs/minipass#readme",
"keywords": [
"passthrough",
"stream"
],
"license": "ISC",
"main": "index.js",
"name": "minipass",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minipass.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100"
},
"version": "2.3.4"
}
Minizlib was created by Isaac Z. Schlueter.
It is a derivative work of the Node.js project.
"""
Copyright Isaac Z. Schlueter and Contributors
Copyright Node.js contributors. All rights reserved.
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# minizlib
A tiny fast zlib stream built on [minipass](http://npm.im/minipass)
and Node.js's zlib binding.
This module was created to serve the needs of
[node-tar](http://npm.im/tar) v2. If your needs are different, then
it may not be for you.
## How does this differ from the streams in `require('zlib')`?
First, there are no convenience methods to compress or decompress a
buffer. If you want those, use the built-in `zlib` module. This is
only streams.
This module compresses and decompresses the data as fast as you feed
it in. It is synchronous, and runs on the main process thread. Zlib
operations can be high CPU, but they're very fast, and doing it this
way means much less bookkeeping and artificial deferral.
Node's built in zlib streams are built on top of `stream.Transform`.
They do the maximally safe thing with respect to consistent
asynchrony, buffering, and backpressure.
This module _does_ support backpressure, and will buffer output chunks
that are not consumed, but is less of a mediator between the input and
output. There are no high or low watermarks, no state objects, and no
artificial async deferrals. It will not protect you from Zalgo.
If you write, data will be emitted right away. If you write
everything synchronously in one tick, and you are listening to the
`data` event to consume it, then it'll all be emitted right away in
that same tick. If you want data to be emitted in the next tick, then
write it in the next tick.
It is thus the responsibility of the reader and writer to manage their
own consumption and process execution flow.
The goal is to compress and decompress as fast as possible, even for
files that are too large to store all in one buffer.
The API is very similar to the built-in zlib module. There are
classes that you instantiate with `new` and they are streams that can
be piped together.
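For instance, a usage sketch under that API (the file names are invented, and the `fs-minipass` streams from earlier in this tree serve as source and sink):

```js
const zlib = require('minizlib')
const fsm = require('fs-minipass')

// gzip a file: plain minipass streams piped together
const input = new fsm.ReadStream('input.txt')
const gzip = new zlib.Gzip()
const output = new fsm.WriteStream('input.txt.gz')

input.pipe(gzip).pipe(output)
```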
module.exports = Object.freeze({
  Z_NO_FLUSH: 0,
  Z_PARTIAL_FLUSH: 1,
  Z_SYNC_FLUSH: 2,
  Z_FULL_FLUSH: 3,
  Z_FINISH: 4,
  Z_BLOCK: 5,
  Z_OK: 0,
  Z_STREAM_END: 1,
  Z_NEED_DICT: 2,
  Z_ERRNO: -1,
  Z_STREAM_ERROR: -2,
  Z_DATA_ERROR: -3,
  Z_MEM_ERROR: -4,
  Z_BUF_ERROR: -5,
  Z_VERSION_ERROR: -6,
  Z_NO_COMPRESSION: 0,
  Z_BEST_SPEED: 1,
  Z_BEST_COMPRESSION: 9,
  Z_DEFAULT_COMPRESSION: -1,
  Z_FILTERED: 1,
  Z_HUFFMAN_ONLY: 2,
  Z_RLE: 3,
  Z_FIXED: 4,
  Z_DEFAULT_STRATEGY: 0,
  ZLIB_VERNUM: 4736,
  DEFLATE: 1,
  INFLATE: 2,
  GZIP: 3,
  GUNZIP: 4,
  DEFLATERAW: 5,
  INFLATERAW: 6,
  UNZIP: 7,
  Z_MIN_WINDOWBITS: 8,
  Z_MAX_WINDOWBITS: 15,
  Z_DEFAULT_WINDOWBITS: 15,
  Z_MIN_CHUNK: 64,
  Z_MAX_CHUNK: Infinity,
  Z_DEFAULT_CHUNK: 16384,
  Z_MIN_MEMLEVEL: 1,
  Z_MAX_MEMLEVEL: 9,
  Z_DEFAULT_MEMLEVEL: 8,
  Z_MIN_LEVEL: -1,
  Z_MAX_LEVEL: 9,
  Z_DEFAULT_LEVEL: -1
})
'use strict'

const assert = require('assert')
const Buffer = require('buffer').Buffer
const binding = process.binding('zlib')

const constants = exports.constants = require('./constants.js')
const MiniPass = require('minipass')

class ZlibError extends Error {
  constructor (msg, errno) {
    super('zlib: ' + msg)
    this.errno = errno
    this.code = codes.get(errno)
  }

  get name () {
    return 'ZlibError'
  }
}

// translation table for return codes.
const codes = new Map([
  [constants.Z_OK, 'Z_OK'],
  [constants.Z_STREAM_END, 'Z_STREAM_END'],
  [constants.Z_NEED_DICT, 'Z_NEED_DICT'],
  [constants.Z_ERRNO, 'Z_ERRNO'],
  [constants.Z_STREAM_ERROR, 'Z_STREAM_ERROR'],
  [constants.Z_DATA_ERROR, 'Z_DATA_ERROR'],
  [constants.Z_MEM_ERROR, 'Z_MEM_ERROR'],
  [constants.Z_BUF_ERROR, 'Z_BUF_ERROR'],
  [constants.Z_VERSION_ERROR, 'Z_VERSION_ERROR']
])

const validFlushFlags = new Set([
  constants.Z_NO_FLUSH,
  constants.Z_PARTIAL_FLUSH,
  constants.Z_SYNC_FLUSH,
  constants.Z_FULL_FLUSH,
  constants.Z_FINISH,
  constants.Z_BLOCK
])

const strategies = new Set([
  constants.Z_FILTERED,
  constants.Z_HUFFMAN_ONLY,
  constants.Z_RLE,
  constants.Z_FIXED,
  constants.Z_DEFAULT_STRATEGY
])

// the Zlib class they all inherit from
// This thing manages the queue of requests, and returns
// true or false if there is anything in the queue when
// you call the .write() method.

const _opts = Symbol('opts')
const _chunkSize = Symbol('chunkSize')
const _flushFlag = Symbol('flushFlag')
const _finishFlush = Symbol('finishFlush')
const _handle = Symbol('handle')
const _hadError = Symbol('hadError')
const _buffer = Symbol('buffer')
const _offset = Symbol('offset')
const _level = Symbol('level')
const _strategy = Symbol('strategy')
const _ended = Symbol('ended')
const _writeState = Symbol('writeState')

class Zlib extends MiniPass {
  constructor (opts, mode) {
    super(opts)
    this[_ended] = false
    this[_opts] = opts = opts || {}
    this[_chunkSize] = opts.chunkSize || constants.Z_DEFAULT_CHUNK
    if (opts.flush && !validFlushFlags.has(opts.flush)) {
      throw new TypeError('Invalid flush flag: ' + opts.flush)
    }
    if (opts.finishFlush && !validFlushFlags.has(opts.finishFlush)) {
      throw new TypeError('Invalid flush flag: ' + opts.finishFlush)
    }

    this[_flushFlag] = opts.flush || constants.Z_NO_FLUSH
    this[_finishFlush] = typeof opts.finishFlush !== 'undefined' ?
      opts.finishFlush : constants.Z_FINISH

    if (opts.chunkSize) {
      if (opts.chunkSize < constants.Z_MIN_CHUNK) {
        throw new RangeError('Invalid chunk size: ' + opts.chunkSize)
      }
    }

    if (opts.windowBits) {
      if (opts.windowBits < constants.Z_MIN_WINDOWBITS ||
          opts.windowBits > constants.Z_MAX_WINDOWBITS) {
        throw new RangeError('Invalid windowBits: ' + opts.windowBits)
      }
    }

    if (opts.level) {
      if (opts.level < constants.Z_MIN_LEVEL ||
          opts.level > constants.Z_MAX_LEVEL) {
        throw new RangeError('Invalid compression level: ' + opts.level)
      }
    }

    if (opts.memLevel) {
      if (opts.memLevel < constants.Z_MIN_MEMLEVEL ||
          opts.memLevel > constants.Z_MAX_MEMLEVEL) {
        throw new RangeError('Invalid memLevel: ' + opts.memLevel)
      }
    }

    if (opts.strategy && !(strategies.has(opts.strategy)))
      throw new TypeError('Invalid strategy: ' + opts.strategy)

    if (opts.dictionary) {
      if (!(opts.dictionary instanceof Buffer)) {
        throw new TypeError('Invalid dictionary: it should be a Buffer instance')
      }
    }

    this[_handle] = new binding.Zlib(mode)

    this[_hadError] = false
    this[_handle].onerror = (message, errno) => {
      // there is no way to cleanly recover.
      // continuing only obscures problems.
      this.close()
      this[_hadError] = true

      const error = new ZlibError(message, errno)
      this.emit('error', error)
    }

    const level = typeof opts.level === 'number' ? opts.level
      : constants.Z_DEFAULT_COMPRESSION

    var strategy = typeof opts.strategy === 'number' ? opts.strategy
      : constants.Z_DEFAULT_STRATEGY

    this[_writeState] = new Uint32Array(2)
    const window = opts.windowBits || constants.Z_DEFAULT_WINDOWBITS
    const memLevel = opts.memLevel || constants.Z_DEFAULT_MEMLEVEL

    // API changed in node v9
    /* istanbul ignore next */
    if (/^v[0-8]\./.test(process.version)) {
      this[_handle].init(window,
                         level,
                         memLevel,
                         strategy,
                         opts.dictionary)
    } else {
      this[_handle].init(window,
                         level,
                         memLevel,
                         strategy,
                         this[_writeState],
                         () => {},
                         opts.dictionary)
    }

    this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
    this[_offset] = 0
    this[_level] = level
    this[_strategy] = strategy

    this.once('end', this.close)
  }

  close () {
    if (this[_handle]) {
      this[_handle].close()
      this[_handle] = null
      this.emit('close')
    }
  }

  params (level, strategy) {
    if (!this[_handle])
      throw new Error('cannot switch params when binding is closed')

    // no way to test this without also not supporting params at all
    /* istanbul ignore if */
    if (!this[_handle].params)
      throw new Error('not supported in this implementation')

    if (level < constants.Z_MIN_LEVEL ||
        level > constants.Z_MAX_LEVEL) {
      throw new RangeError('Invalid compression level: ' + level)
    }

    if (!(strategies.has(strategy)))
      throw new TypeError('Invalid strategy: ' + strategy)

    if (this[_level] !== level || this[_strategy] !== strategy) {
      this.flush(constants.Z_SYNC_FLUSH)
      assert(this[_handle], 'zlib binding closed')
      this[_handle].params(level, strategy)
      /* istanbul ignore else */
      if (!this[_hadError]) {
        this[_level] = level
        this[_strategy] = strategy
      }
    }
  }

  reset () {
    assert(this[_handle], 'zlib binding closed')
    return this[_handle].reset()
  }

  flush (kind) {
    if (kind === undefined)
      kind = constants.Z_FULL_FLUSH

    if (this.ended)
      return

    const flushFlag = this[_flushFlag]
    this[_flushFlag] = kind
    this.write(Buffer.alloc(0))
    this[_flushFlag] = flushFlag
  }

  end (chunk, encoding, cb) {
    if (chunk)
      this.write(chunk, encoding)
    this.flush(this[_finishFlush])
    this[_ended] = true
    return super.end(null, null, cb)
  }

  get ended () {
    return this[_ended]
  }

  write (chunk, encoding, cb) {
    // process the chunk using the sync process
    // then super.write() all the outputted chunks
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (typeof chunk === 'string')
      chunk = new Buffer(chunk, encoding)

    let availInBefore = chunk && chunk.length
    let availOutBefore = this[_chunkSize] - this[_offset]
    let inOff = 0 // the offset of the input buffer
    const flushFlag = this[_flushFlag]
    let writeReturn = true

    assert(this[_handle], 'zlib binding closed')
    do {
      let res = this[_handle].writeSync(
        flushFlag,
        chunk, // in
        inOff, // in_off
        availInBefore, // in_len
        this[_buffer], // out
        this[_offset], // out_off
        availOutBefore // out_len
      )

      if (this[_hadError])
        break

      // API changed in v9
      /* istanbul ignore next */
      let availInAfter = res ? res[0] : this[_writeState][1]
      /* istanbul ignore next */
      let availOutAfter = res ? res[1] : this[_writeState][0]

      const have = availOutBefore - availOutAfter
      assert(have >= 0, 'have should not go down')

      if (have > 0) {
        const out = this[_buffer].slice(
          this[_offset], this[_offset] + have
        )
        this[_offset] += have
        // serve some output to the consumer.
        writeReturn = super.write(out) && writeReturn
      }

      // exhausted the output buffer, or used all the input; create a new one.
      if (availOutAfter === 0 || this[_offset] >= this[_chunkSize]) {
        availOutBefore = this[_chunkSize]
        this[_offset] = 0
        this[_buffer] = Buffer.allocUnsafe(this[_chunkSize])
      }

      if (availOutAfter === 0) {
        // Not actually done. Need to reprocess.
        // Also, update the availInBefore to the availInAfter value,
        // so that if we have to hit it a third (fourth, etc.) time,
        // it'll have the correct byte counts.
        inOff += (availInBefore - availInAfter)
        availInBefore = availInAfter
        continue
      }
      break
    } while (!this[_hadError])

    if (cb)
      cb()

    return writeReturn
  }
}

// minimal 2-byte header
class Deflate extends Zlib {
  constructor (opts) {
    super(opts, constants.DEFLATE)
  }
}

class Inflate extends Zlib {
  constructor (opts) {
    super(opts, constants.INFLATE)
  }
}

// gzip - bigger header, same deflate compression
class Gzip extends Zlib {
  constructor (opts) {
    super(opts, constants.GZIP)
  }
}

class Gunzip extends Zlib {
  constructor (opts) {
    super(opts, constants.GUNZIP)
  }
}

// raw - no header
class DeflateRaw extends Zlib {
  constructor (opts) {
    super(opts, constants.DEFLATERAW)
  }
}

class InflateRaw extends Zlib {
  constructor (opts) {
    super(opts, constants.INFLATERAW)
  }
}

// auto-detect header.
class Unzip extends Zlib {
  constructor (opts) {
    super(opts, constants.UNZIP)
  }
}

exports.Deflate = Deflate
exports.Inflate = Inflate
exports.Gzip = Gzip
exports.Gunzip = Gunzip
exports.DeflateRaw = DeflateRaw
exports.InflateRaw = InflateRaw
exports.Unzip = Unzip
{
"_from": "minizlib@^1.1.0",
"_id": "minizlib@1.1.0",
"_inBundle": false,
"_integrity": "sha1-EeE2WM5GvDpwomeqxYNZ0eDCnOs=",
"_location": "/minizlib",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "minizlib@^1.1.0",
"name": "minizlib",
"escapedName": "minizlib",
"rawSpec": "^1.1.0",
"saveSpec": null,
"fetchSpec": "^1.1.0"
},
"_requiredBy": [
"/tar"
],
"_resolved": "http://registry.npm.taobao.org/minizlib/download/minizlib-1.1.0.tgz",
"_shasum": "11e13658ce46bc3a70a267aac58359d1e0c29ceb",
"_spec": "minizlib@^1.1.0",
"_where": "/Users/medicean/workspace/antSword/node_modules/tar",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me",
"url": "http://blog.izs.me/"
},
"bugs": {
"url": "https://github.com/isaacs/minizlib/issues"
},
"bundleDependencies": false,
"dependencies": {
"minipass": "^2.2.1"
},
"deprecated": false,
"description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
"devDependencies": {
"tap": "^10.7.2"
},
"files": [
"index.js",
"constants.js"
],
"homepage": "https://github.com/isaacs/minizlib#readme",
"keywords": [
"zlib",
"gzip",
"gunzip",
"deflate",
"inflate",
"compression",
"zip",
"unzip"
],
"license": "MIT",
"main": "index.js",
"name": "minizlib",
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/minizlib.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test/*.js --100 -J"
},
"version": "1.1.0"
}
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
'use strict'
// high-level commands
exports.c = exports.create = require('./lib/create.js')
exports.r = exports.replace = require('./lib/replace.js')
exports.t = exports.list = require('./lib/list.js')
exports.u = exports.update = require('./lib/update.js')
exports.x = exports.extract = require('./lib/extract.js')
// classes
exports.Pack = require('./lib/pack.js')
exports.Unpack = require('./lib/unpack.js')
exports.Parse = require('./lib/parse.js')
exports.ReadEntry = require('./lib/read-entry.js')
exports.WriteEntry = require('./lib/write-entry.js')
exports.Header = require('./lib/header.js')
exports.Pax = require('./lib/pax.js')
exports.types = require('./lib/types.js')
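// --- Editor's illustrative sketch (not part of the vendored tar source).
// Typical use of the high-level commands exported above; 'archive.tgz',
// 'some-dir', and 'dest' are hypothetical names. With a `file` option and no
// callback, each command returns a Promise:
//
//   const tar = require('tar')
//   tar.c({ gzip: true, file: 'archive.tgz' }, ['some-dir'])
//     .then(() => tar.t({ file: 'archive.tgz', onentry: e => console.log(e.path) }))
//     .then(() => tar.x({ file: 'archive.tgz', cwd: 'dest' }))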
'use strict'
// Buffer in node 4.x < 4.5.0 doesn't have working Buffer.from
// or Buffer.alloc, and Buffer in node 10 deprecated the ctor.
// .M, this is fine .\^/M..
let B = Buffer
/* istanbul ignore next */
if (!B.alloc) {
B = require('safe-buffer').Buffer
}
module.exports = B
'use strict'
// tar -c
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')
const c = module.exports = (opt_, files, cb) => {
if (typeof files === 'function')
cb = files
if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (!files || !Array.isArray(files) || !files.length)
throw new TypeError('no files or directories specified')
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
return opt.file && opt.sync ? createFileSync(opt, files)
: opt.file ? createFile(opt, files, cb)
: opt.sync ? createSync(opt, files)
: create(opt, files)
}
const createFileSync = (opt, files) => {
const p = new Pack.Sync(opt)
const stream = new fsm.WriteStreamSync(opt.file, {
mode: opt.mode || 0o666
})
p.pipe(stream)
addFilesSync(p, files)
}
const createFile = (opt, files, cb) => {
const p = new Pack(opt)
const stream = new fsm.WriteStream(opt.file, {
mode: opt.mode || 0o666
})
p.pipe(stream)
const promise = new Promise((res, rej) => {
stream.on('error', rej)
stream.on('close', res)
p.on('error', rej)
})
addFilesAsync(p, files)
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@')
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry)
})
else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@')
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry)
}).then(_ => addFilesAsync(p, files))
else
p.add(file)
}
p.end()
}
const createSync = (opt, files) => {
const p = new Pack.Sync(opt)
addFilesSync(p, files)
return p
}
const create = (opt, files) => {
const p = new Pack(opt)
addFilesAsync(p, files)
return p
}
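// --- Editor's note and sketch (not part of the vendored source): in
// addFilesSync/addFilesAsync above, a file argument starting with '@' names
// an existing archive whose entries are re-added to the new archive, e.g.
// (hypothetical names):
//
//   tar.c({ file: 'combined.tar' }, ['new-file.txt', '@old-archive.tar'])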
'use strict'
// tar -x
const hlo = require('./high-level-opt.js')
const Unpack = require('./unpack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const x = module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
return opt.file && opt.sync ? extractFileSync(opt)
: opt.file ? extractFile(opt, cb)
: opt.sync ? extractSync(opt)
: extract(opt)
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
: file => mapHas(file.replace(/\/+$/, ''))
}
const extractFileSync = opt => {
const u = new Unpack.Sync(opt)
const file = opt.file
const stat = fs.statSync(file)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
const readSize = opt.maxReadSize || 16*1024*1024
const stream = new fsm.ReadStreamSync(file, {
readSize: readSize,
size: stat.size
})
stream.pipe(u)
}
const extractFile = (opt, cb) => {
const u = new Unpack(opt)
const readSize = opt.maxReadSize || 16*1024*1024
const file = opt.file
const p = new Promise((resolve, reject) => {
u.on('error', reject)
u.on('close', resolve)
// This trades a zero-byte read() syscall for a stat
// However, it will usually result in less memory allocation
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size
})
stream.on('error', reject)
stream.pipe(u)
}
})
})
return cb ? p.then(cb, cb) : p
}
const extractSync = opt => {
return new Unpack.Sync(opt)
}
const extract = opt => {
return new Unpack(opt)
}
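// --- Editor's note and sketch (not part of the vendored source): filesFilter
// above admits an entry if the entry itself or any of its parent directories
// was requested, so extracting 'a/b' also extracts 'a/b/c/d.txt' but skips
// 'a/other.txt'. Hypothetical names:
//
//   tar.x({ file: 'archive.tar', cwd: 'dest' }, ['a/b'])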
'use strict'
// parse a 512-byte header block to a data object, or vice-versa
// encode returns `true` if a pax extended header is needed, because
// the data could not be faithfully encoded in a simple header.
// (Also, check header.needPax to see if it needs a pax header.)
const Buffer = require('./buffer.js')
const types = require('./types.js')
const pathModule = require('path').posix
const large = require('./large-numbers.js')
const SLURP = Symbol('slurp')
const TYPE = Symbol('type')
class Header {
constructor (data, off, ex, gex) {
this.cksumValid = false
this.needPax = false
this.nullBlock = false
this.block = null
this.path = null
this.mode = null
this.uid = null
this.gid = null
this.size = null
this.mtime = null
this.cksum = null
this[TYPE] = '0'
this.linkpath = null
this.uname = null
this.gname = null
this.devmaj = 0
this.devmin = 0
this.atime = null
this.ctime = null
if (Buffer.isBuffer(data))
this.decode(data, off || 0, ex, gex)
else if (data)
this.set(data)
}
decode (buf, off, ex, gex) {
if (!off)
off = 0
if (!buf || !(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
this.path = decString(buf, off, 100)
this.mode = decNumber(buf, off + 100, 8)
this.uid = decNumber(buf, off + 108, 8)
this.gid = decNumber(buf, off + 116, 8)
this.size = decNumber(buf, off + 124, 12)
this.mtime = decDate(buf, off + 136, 12)
this.cksum = decNumber(buf, off + 148, 12)
// if we have extended or global extended headers, apply them now
// See https://github.com/npm/node-tar/pull/187
this[SLURP](ex)
this[SLURP](gex, true)
// old tar versions marked dirs as a file with a trailing /
this[TYPE] = decString(buf, off + 156, 1)
if (this[TYPE] === '')
this[TYPE] = '0'
if (this[TYPE] === '0' && this.path.substr(-1) === '/')
this[TYPE] = '5'
// tar implementations sometimes incorrectly put the stat(dir).size
// as the size in the tarball, even though Directory entries are
// not able to have any body at all. In the very rare chance that
// it actually DOES have a body, we weren't going to do anything with
// it anyway, and it'll just be a warning about an invalid header.
if (this[TYPE] === '5')
this.size = 0
this.linkpath = decString(buf, off + 157, 100)
if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
this.uname = decString(buf, off + 265, 32)
this.gname = decString(buf, off + 297, 32)
this.devmaj = decNumber(buf, off + 329, 8)
this.devmin = decNumber(buf, off + 337, 8)
if (buf[off + 475] !== 0) {
// definitely a prefix, definitely >130 chars.
const prefix = decString(buf, off + 345, 155)
this.path = prefix + '/' + this.path
} else {
const prefix = decString(buf, off + 345, 130)
if (prefix)
this.path = prefix + '/' + this.path
this.atime = decDate(buf, off + 476, 12)
this.ctime = decDate(buf, off + 488, 12)
}
}
// the checksum field itself is counted as 8 space bytes (8 * 0x20)
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++) {
sum += buf[i]
}
for (let i = off + 156; i < off + 512; i++) {
sum += buf[i]
}
this.cksumValid = sum === this.cksum
if (this.cksum === null && sum === 8 * 0x20)
this.nullBlock = true
}
[SLURP] (ex, global) {
for (let k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = ex[k]
}
}
encode (buf, off) {
if (!buf) {
buf = this.block = Buffer.alloc(512)
off = 0
}
if (!off)
off = 0
if (!(buf.length >= off + 512))
throw new Error('need 512 bytes for header')
const prefixSize = this.ctime || this.atime ? 130 : 155
const split = splitPrefix(this.path || '', prefixSize)
const path = split[0]
const prefix = split[1]
this.needPax = split[2]
this.needPax = encString(buf, off, 100, path) || this.needPax
this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
buf[off + 156] = this[TYPE].charCodeAt(0)
this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
buf.write('ustar\u000000', off + 257, 8)
this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
if (buf[off + 475] !== 0)
this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
else {
this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
}
let sum = 8 * 0x20
for (let i = off; i < off + 148; i++) {
sum += buf[i]
}
for (let i = off + 156; i < off + 512; i++) {
sum += buf[i]
}
this.cksum = sum
encNumber(buf, off + 148, 8, this.cksum)
this.cksumValid = true
return this.needPax
}
set (data) {
for (let i in data) {
if (data[i] !== null && data[i] !== undefined)
this[i] = data[i]
}
}
get type () {
return types.name.get(this[TYPE]) || this[TYPE]
}
get typeKey () {
return this[TYPE]
}
set type (type) {
if (types.code.has(type))
this[TYPE] = types.code.get(type)
else
this[TYPE] = type
}
}
const splitPrefix = (p, prefixSize) => {
const pathSize = 100
let pp = p
let prefix = ''
let ret
const root = pathModule.parse(p).root || '.'
if (Buffer.byteLength(pp) < pathSize)
ret = [pp, prefix, false]
else {
// first set prefix to the dir, and path to the base
prefix = pathModule.dirname(pp)
pp = pathModule.basename(pp)
do {
// both fit!
if (Buffer.byteLength(pp) <= pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp, prefix, false]
// prefix fits in prefix, but path doesn't fit in path
else if (Buffer.byteLength(pp) > pathSize &&
Buffer.byteLength(prefix) <= prefixSize)
ret = [pp.substr(0, pathSize - 1), prefix, true]
else {
// make path take a bit from prefix
pp = pathModule.join(pathModule.basename(prefix), pp)
prefix = pathModule.dirname(prefix)
}
} while (prefix !== root && !ret)
// at this point, found no resolution, just truncate
if (!ret)
ret = [p.substr(0, pathSize - 1), '', true]
}
return ret
}
const decString = (buf, off, size) =>
buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
const decDate = (buf, off, size) =>
numToDate(decNumber(buf, off, size))
const numToDate = num => num === null ? null : new Date(num * 1000)
const decNumber = (buf, off, size) =>
buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
: decSmallNumber(buf, off, size)
const nanNull = value => isNaN(value) ? null : value
const decSmallNumber = (buf, off, size) =>
nanNull(parseInt(
buf.slice(off, off + size)
.toString('utf8').replace(/\0.*$/, '').trim(), 8))
// the maximum encodable as a null-terminated octal, by field size
const MAXNUM = {
12: 0o77777777777,
8 : 0o7777777
}
const encNumber = (buf, off, size, number) =>
number === null ? false :
number > MAXNUM[size] || number < 0
? (large.encode(number, buf.slice(off, off + size)), true)
: (encSmallNumber(buf, off, size, number), false)
const encSmallNumber = (buf, off, size, number) =>
buf.write(octalString(number, size), off, size, 'ascii')
const octalString = (number, size) =>
padOctal(Math.floor(number).toString(8), size)
const padOctal = (string, size) =>
(string.length === size - 1 ? string
: new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
const encDate = (buf, off, size, date) =>
date === null ? false :
encNumber(buf, off, size, date.getTime() / 1000)
// enough to fill the longest string we've got
const NULLS = new Array(156).join('\0')
// pad with nulls, return true if it's longer or non-ascii
const encString = (buf, off, size, string) =>
string === null ? false :
(buf.write(string + NULLS, off, size, 'utf8'),
string.length !== Buffer.byteLength(string) || string.length > size)
module.exports = Header
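// --- Editor's illustrative round-trip (not part of the vendored source);
// the field values are hypothetical:
//
//   const h = new Header({ path: 'some-file.txt', mode: 0o644, size: 11,
//                          mtime: new Date(), type: 'File' })
//   h.encode()                    // fills h.block with a 512-byte ustar header
//   const copy = new Header(h.block)
//   // copy.path === 'some-file.txt', copy.cksumValid === true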
'use strict'
// turn tar(1) style args like `C` into the more verbose things like `cwd`
const argmap = new Map([
['C', 'cwd'],
['f', 'file'],
['z', 'gzip'],
['P', 'preservePaths'],
['U', 'unlink'],
['strip-components', 'strip'],
['stripComponents', 'strip'],
['keep-newer', 'newer'],
['keepNewer', 'newer'],
['keep-newer-files', 'newer'],
['keepNewerFiles', 'newer'],
['k', 'keep'],
['keep-existing', 'keep'],
['keepExisting', 'keep'],
['m', 'noMtime'],
['no-mtime', 'noMtime'],
['p', 'preserveOwner'],
['L', 'follow'],
['h', 'follow']
])
const parse = module.exports = opt => opt ? Object.keys(opt).map(k => [
argmap.has(k) ? argmap.get(k) : k, opt[k]
]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
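// --- Editor's sketch (not part of the vendored source): the map above turns
// tar(1)-style flags into verbose option names, and unknown keys pass through:
//
//   parse({ C: '/tmp', z: true, strict: true })
//   // => { cwd: '/tmp', gzip: true, strict: true }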
'use strict'
// Tar can encode large and negative numbers using a leading byte of
// 0xff for negative, and 0x80 for positive. The trailing byte in the
// section will always be 0x20, or in some implementations 0x00.
// This module encodes and decodes these values.
const encode = exports.encode = (num, buf) => {
buf[buf.length - 1] = 0x20
if (num < 0)
encodeNegative(num, buf)
else
encodePositive(num, buf)
return buf
}
const encodePositive = (num, buf) => {
buf[0] = 0x80
for (var i = buf.length - 2; i > 0; i--) {
if (num === 0)
buf[i] = 0
else {
buf[i] = num % 0x100
num = Math.floor(num / 0x100)
}
}
}
const encodeNegative = (num, buf) => {
buf[0] = 0xff
var flipped = false
num = num * -1
for (var i = buf.length - 2; i > 0; i--) {
var byte
if (num === 0)
byte = 0
else {
byte = num % 0x100
num = Math.floor(num / 0x100)
}
if (flipped)
buf[i] = onesComp(byte)
else if (byte === 0)
buf[i] = 0
else {
flipped = true
buf[i] = twosComp(byte)
}
}
}
const parse = exports.parse = (buf) => {
// the trailing pad byte (0x20 or 0x00) is ignored
var pre = buf[0]
return pre === 0x80 ? pos(buf.slice(1, buf.length - 1))
: twos(buf.slice(1, buf.length - 1))
}
const twos = (buf) => {
var len = buf.length
var sum = 0
var flipped = false
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
var f
if (flipped)
f = onesComp(byte)
else if (byte === 0)
f = byte
else {
flipped = true
f = twosComp(byte)
}
if (f !== 0)
sum += f * Math.pow(256, len - i - 1)
}
return sum * -1
}
const pos = (buf) => {
var len = buf.length
var sum = 0
for (var i = len - 1; i > -1; i--) {
var byte = buf[i]
if (byte !== 0)
sum += byte * Math.pow(256, len - i - 1)
}
return sum
}
const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
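// --- Editor's worked example (not part of the vendored source): a 12-byte
// octal field maxes out at 0o77777777777 (8589934591), so one byte more
// switches to base-256 with a 0x80 marker:
//
//   const buf = Buffer.alloc(12)
//   encode(8589934592, buf)
//   // buf = <80 00 00 00 00 00 02 00 00 00 00 20>  (big-endian, 0x20 pad)
//   parse(buf)   // => 8589934592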
'use strict'
const Buffer = require('./buffer.js')
// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?
// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const t = module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
if (!opt.noResume)
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt)
: opt.file ? listFile(opt, cb)
: list(opt)
}
const onentryFunction = opt => {
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
: file => mapHas(file.replace(/\/+$/, ''))
}
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16*1024*1024
if (stat.size < readSize) {
p.end(fs.readFileSync(file))
} else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
let bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd)
try { fs.closeSync(fd) } catch (er) {}
}
}
const listFile = (opt, cb) => {
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16*1024*1024
const file = opt.file
const p = new Promise((resolve, reject) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size
})
stream.on('error', reject)
stream.pipe(parse)
}
})
})
return cb ? p.then(cb, cb) : p
}
const list = opt => new Parser(opt)
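// --- Editor's sketch (not part of the vendored source): without opt.file,
// t() returns the Parser itself, so raw tar data can be piped straight in:
//
//   process.stdin.pipe(t({ onentry: entry => console.error(entry.path) }))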
'use strict'
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
const mkdirp = require('mkdirp')
const fs = require('fs')
const path = require('path')
const chownr = require('chownr')
class SymlinkError extends Error {
constructor (symlink, path) {
super('Cannot extract through symbolic link')
this.path = path
this.symlink = symlink
}
get name () {
return 'SymlinkError'
}
}
class CwdError extends Error {
constructor (path, code) {
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
}
get name () {
return 'CwdError'
}
}
const mkdir = module.exports = (dir, opt, cb) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
( uid !== opt.processUid || gid !== opt.processGid )
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = opt.cwd
const done = (er, created) => {
if (er)
cb(er)
else {
cache.set(dir, true)
if (created && doChown)
chownr(created, uid, gid, er => done(er))
else if (needChmod)
fs.chmod(dir, mode, cb)
else
cb()
}
}
if (cache && cache.get(dir) === true)
return done()
if (dir === cwd)
return fs.lstat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
done(er)
})
if (preserve)
return mkdirp(dir, mode, done)
const sub = path.relative(cwd, dir)
const parts = sub.split(/\/|\\/)
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length)
return cb(null, created)
const p = parts.shift()
const part = base + '/' + p
if (cache.get(part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
if (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return cb(new CwdError(cwd, er.code))
fs.lstat(part, (statEr, st) => {
if (statEr)
cb(statEr)
else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink)
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
})
} else {
created = created || part
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
}
}
const mkdirSync = module.exports.sync = (dir, opt) => {
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
( uid !== opt.processUid || gid !== opt.processGid )
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = opt.cwd
const done = (created) => {
cache.set(dir, true)
if (created && doChown)
chownr.sync(created, uid, gid)
if (needChmod)
fs.chmodSync(dir, mode)
}
if (cache && cache.get(dir) === true)
return done()
if (dir === cwd) {
let ok = false
let code = 'ENOTDIR'
try {
ok = fs.lstatSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
done()
return
}
if (preserve)
return done(mkdirp.sync(dir, mode))
const sub = path.relative(cwd, dir)
const parts = sub.split(/\/|\\/)
let created = null
for (let p = parts.shift(), part = cwd;
p && (part += '/' + p);
p = parts.shift()) {
if (cache.get(part))
continue
try {
fs.mkdirSync(part, mode)
created = created || part
cache.set(part, true)
} catch (er) {
if (er.path && path.dirname(er.path) === cwd &&
(er.code === 'ENOTDIR' || er.code === 'ENOENT'))
return new CwdError(cwd, er.code)
const st = fs.lstatSync(part)
if (st.isDirectory()) {
cache.set(part, true)
continue
} else if (unlink) {
fs.unlinkSync(part)
fs.mkdirSync(part, mode)
created = created || part
cache.set(part, true)
continue
} else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/'))
}
}
return done(created)
}
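// --- Editor's note and sketch (not part of the vendored source): this module
// is internal to unpack, so callers must pass the full option bag; every
// field value below is an assumption about a minimal call:
//
//   mkdir('/tmp/extract/a/b', {
//     umask: 0o22, mode: 0o755, uid: null, gid: null,
//     processUid: process.getuid && process.getuid(),
//     processGid: process.getgid && process.getgid(),
//     preserve: false, unlink: false, cache: new Map(), cwd: '/tmp/extract'
//   }, er => { if (er) throw er })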
'use strict'
module.exports = (mode, isDir) => {
mode &= 0o7777
// if dirs are readable, then they should be listable
if (isDir) {
if (mode & 0o400)
mode |= 0o100
if (mode & 0o40)
mode |= 0o10
if (mode & 0o4)
mode |= 0o1
}
return mode
}
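// --- Editor's worked example (not part of the vendored source): each
// readable triad gains the matching execute/search bit for directories:
//
//   module.exports(0o644, true)    // => 0o755
//   module.exports(0o644, false)   // => 0o644 (non-dirs are untouched)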
'use strict'
const Buffer = require('./buffer.js')
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and `end()` returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const Pack = warner(class Pack extends MiniPass {
constructor (opt) {
super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.portable = !!opt.portable
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[ENDED] = false
}
[WRITE] (chunk) {
return super.write(chunk)
}
add (path) {
this.write(path)
return this
}
end (path) {
if (path)
this.write(path)
this[ENDED] = true
this[PROCESS]()
return this
}
write (path) {
if (this[ENDED])
throw new Error('write after end')
if (path instanceof ReadEntry)
this[ADDTARENTRY](path)
else
this[ADDFSENTRY](path)
return this.flowing
}
[ADDTARENTRY] (p) {
const absolute = path.resolve(this.cwd, p.path)
if (this.prefix)
p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '')
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p))
p.resume()
else {
const job = new PackJob(p.path, absolute)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}
this[PROCESS]()
}
[ADDFSENTRY] (p) {
const absolute = path.resolve(this.cwd, p)
if (this.prefix)
p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '')
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[STAT] (job) {
job.pending = true
this[JOBS] += 1
const stat = this.follow ? 'stat' : 'lstat'
fs[stat](job.absolute, (er, stat) => {
job.pending = false
this[JOBS] -= 1
if (er)
this.emit('error', er)
else
this[ONSTAT](job, stat)
})
}
[ONSTAT] (job, stat) {
this.statCache.set(job.absolute, stat)
job.stat = stat
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat))
job.ignore = true
this[PROCESS]()
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er)
return this.emit('error', er)
this[ONREADDIR](job, entries)
})
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
}
[PROCESS] () {
if (this[PROCESSING])
return
this[PROCESSING] = true
for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
this[PROCESSING] = false
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (this.zip)
this.zip.end(EOF)
else {
super.write(EOF)
super.end()
}
}
}
get [CURRENT] () {
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
}
[JOBDONE] (job) {
this[QUEUE].shift()
this[JOBS] -= 1
this[PROCESS]()
}
[PROCESSJOB] (job) {
if (job.pending)
return
if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return
}
if (!job.stat) {
if (this.statCache.has(job.absolute))
this[ONSTAT](job, this.statCache.get(job.absolute))
else
this[STAT](job)
}
if (!job.stat)
return
// filtered out!
if (job.ignore)
return
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute))
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
else
this[READDIR](job)
if (!job.readdir)
return
}
// we know it doesn't have an entry, because that got checked above
job.entry = this[ENTRY](job)
if (!job.entry) {
job.ignore = true
return
}
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
}
[ENTRYOPT] (job) {
return {
onwarn: (msg, data) => {
this.warn(msg, data)
},
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime
}
}
[ENTRY] (job) {
this[JOBS] += 1
try {
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
.on('end', () => this[JOBDONE](job))
.on('error', er => this.emit('error', er))
} catch (er) {
this.emit('error', er)
}
}
[ONDRAIN] () {
if (this[CURRENT] && this[CURRENT].entry)
this[CURRENT].entry.resume()
}
// like .pipe() but using super, because our write() is special
[PIPE] (job) {
job.piped = true
if (job.readdir)
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
const source = job.entry
const zip = this.zip
if (zip)
source.on('data', chunk => {
if (!zip.write(chunk))
source.pause()
})
else
source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
}
pause () {
if (this.zip)
this.zip.pause()
return super.pause()
}
})
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
}
// pause/resume are no-ops in sync streams.
pause () {}
resume () {}
[STAT] (job) {
const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
[READDIR] (job, stat) {
this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
if (job.readdir)
job.readdir.forEach(entry => {
const p = this.prefix ?
job.path.slice(this.prefix.length + 1) || './'
: job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
if (zip)
source.on('data', chunk => {
zip.write(chunk)
})
else
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
Pack.Sync = PackSync
module.exports = Pack
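// --- Editor's sketch (not part of the vendored source): driving Pack
// directly, since add() and end() return `this`; 'out.tgz' is hypothetical:
//
//   new Pack({ cwd: '.', gzip: true })
//     .add('lib')
//     .add('package.json')
//     .end()
//     .pipe(fs.createWriteStream('out.tgz'))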
'use strict'
const Buffer = require('./buffer.js')
const Header = require('./header.js')
const path = require('path')
class Pax {
constructor (obj, global) {
this.atime = obj.atime || null
this.charset = obj.charset || null
this.comment = obj.comment || null
this.ctime = obj.ctime || null
this.gid = obj.gid || null
this.gname = obj.gname || null
this.linkpath = obj.linkpath || null
this.mtime = obj.mtime || null
this.path = obj.path || null
this.size = obj.size || null
this.uid = obj.uid || null
this.uname = obj.uname || null
this.dev = obj.dev || null
this.ino = obj.ino || null
this.nlink = obj.nlink || null
this.global = global || false
}
encode () {
const body = this.encodeBody()
if (body === '')
return null
const bodyLen = Buffer.byteLength(body)
// round up to 512 bytes
// add 512 for header
const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
const buf = Buffer.allocUnsafe(bufLen)
// 0-fill the header section, it might not hit every field
for (let i = 0; i < 512; i++) {
buf[i] = 0
}
new Header({
// XXX: ideally the path would be split: 'PaxHeader/' + basename,
// kept under 99 chars, with the dirname prepended
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
mode: this.mode || 0o644,
uid: this.uid || null,
gid: this.gid || null,
size: bodyLen,
mtime: this.mtime || null,
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
linkpath: '',
uname: this.uname || '',
gname: this.gname || '',
devmaj: 0,
devmin: 0,
atime: this.atime || null,
ctime: this.ctime || null
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8')
// null pad after the body
for (let i = bodyLen + 512; i < buf.length; i++) {
buf[i] = 0
}
return buf
}
encodeBody () {
return (
this.encodeField('path') +
this.encodeField('ctime') +
this.encodeField('atime') +
this.encodeField('dev') +
this.encodeField('ino') +
this.encodeField('nlink') +
this.encodeField('charset') +
this.encodeField('comment') +
this.encodeField('gid') +
this.encodeField('gname') +
this.encodeField('linkpath') +
this.encodeField('mtime') +
this.encodeField('size') +
this.encodeField('uid') +
this.encodeField('uname')
)
}
encodeField (field) {
if (this[field] === null || this[field] === undefined)
return ''
const v = this[field] instanceof Date ? this[field].getTime() / 1000
: this[field]
const s = ' ' +
(field === 'dev' || field === 'ino' || field === 'nlink'
? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the length prefix counts its own digits as well, so adding it can
// push the total into the next power of ten: e.g. a 9-byte record
// with a 1-digit prefix would total 10 bytes, which needs 2 digits,
// making the final record 11 bytes long.
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
if (byteLen + digits >= Math.pow(10, digits))
digits += 1
const len = digits + byteLen
return len + s
}
}
Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
const merge = (a, b) =>
b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
const parseKV = string =>
string
.replace(/\n$/, '')
.split('\n')
.reduce(parseKVLine, Object.create(null))
const parseKVLine = (set, line) => {
const n = parseInt(line, 10)
// XXX Values with \n in them will fail this.
// Refactor to not be a naive line-by-line parse.
if (n !== Buffer.byteLength(line) + 1)
return set
line = line.substr((n + ' ').length)
const kv = line.split('=')
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
if (!k)
return set
const v = kv.join('=')
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
? new Date(v * 1000)
: /^[0-9]+$/.test(v) ? +v
: v
return set
}
module.exports = Pax
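// --- Editor's worked example (not part of the vendored source): each pax
// record is '<len> <key>=<value>\n', where <len> counts the entire line
// including its own digits. For path=foo, ' path=foo\n' is 10 bytes and the
// 2-digit length brings the total to 12:
//
//   new Pax({ path: 'foo' }).encodeBody()   // => '12 path=foo\n'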
'use strict'
module.exports = Base => class extends Base {
warn (msg, data) {
if (!this.strict)
this.emit('warn', msg, data)
else if (data instanceof Error)
this.emit('error', data)
else {
const er = new Error(msg)
er.data = data
this.emit('error', er)
}
}
}
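// --- Editor's sketch (not part of the vendored source): the mixin wraps any
// EventEmitter-like base; once `strict` is set, warnings escalate to 'error':
//
//   const Noisy = module.exports(require('events'))
//   const n = new Noisy()
//   n.on('warn', msg => console.error('warn:', msg))
//   n.on('error', er => console.error('error:', er.message))
//   n.warn('tarball truncated')   // emits 'warn' (strict is falsey)
//   n.strict = true
//   n.warn('tarball truncated')   // emits 'error' with a new Error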