path: root/node_modules/tar-stream
author     Minteck <freeziv.ytb@gmail.com>    2021-03-07 18:29:17 +0100
committer  Minteck <freeziv.ytb@gmail.com>    2021-03-07 18:29:17 +0100
commit     0f79e708bf07721b73ea41e5d341be08e8ea4dce (patch)
tree       f3c63cd6a9f4ef0b26f95eec6a031600232e80c8 /node_modules/tar-stream
Initial commit
Diffstat (limited to 'node_modules/tar-stream')
-rw-r--r--   node_modules/tar-stream/LICENSE        21
-rw-r--r--   node_modules/tar-stream/README.md     168
-rw-r--r--   node_modules/tar-stream/extract.js    257
-rw-r--r--   node_modules/tar-stream/headers.js    293
-rw-r--r--   node_modules/tar-stream/index.js        2
-rw-r--r--   node_modules/tar-stream/pack.js       255
-rw-r--r--   node_modules/tar-stream/package.json   89
-rw-r--r--   node_modules/tar-stream/sandbox.js     11
8 files changed, 1096 insertions, 0 deletions
diff --git a/node_modules/tar-stream/LICENSE b/node_modules/tar-stream/LICENSE
new file mode 100644
index 0000000..757562e
--- /dev/null
+++ b/node_modules/tar-stream/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Mathias Buus
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
\ No newline at end of file
diff --git a/node_modules/tar-stream/README.md b/node_modules/tar-stream/README.md
new file mode 100644
index 0000000..2679d9d
--- /dev/null
+++ b/node_modules/tar-stream/README.md
@@ -0,0 +1,168 @@
+# tar-stream
+
+tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams, which means you can easily extract/parse tarballs without ever hitting the file system.
+
+Note that you still need to gunzip your data if you have a `.tar.gz`. We recommend using [gunzip-maybe](https://github.com/mafintosh/gunzip-maybe) in conjunction with this.
+
+```
+npm install tar-stream
+```
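+
+For example, a minimal sketch of untarring a gzipped archive (the file name and the use of gunzip-maybe are illustrative, not part of this module):
+
+``` js
+var fs = require('fs')
+var gunzip = require('gunzip-maybe') // only decompresses when the input is actually gzipped
+var tar = require('tar-stream')
+
+var extract = tar.extract()
+
+extract.on('entry', function (header, stream, next) {
+  console.log('found', header.name)
+  stream.on('end', next)
+  stream.resume() // drain each entry so the next one can be parsed
+})
+
+fs.createReadStream('archive.tar.gz').pipe(gunzip()).pipe(extract)
+```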
+
+[![build status](https://secure.travis-ci.org/mafintosh/tar-stream.png)](http://travis-ci.org/mafintosh/tar-stream)
+[![License](https://img.shields.io/badge/license-MIT-blue.svg)](http://opensource.org/licenses/MIT)
+
+## Usage
+
+tar-stream exposes two streams, [pack](https://github.com/mafintosh/tar-stream#packing) which creates tarballs and [extract](https://github.com/mafintosh/tar-stream#extracting) which extracts tarballs. To [modify an existing tarball](https://github.com/mafintosh/tar-stream#modifying-existing-tarballs) use both.
+
+
+It implements USTAR with additional support for pax extended headers. It should be compatible with all popular tar distributions out there (gnutar, bsdtar, etc.)
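+
+As a rough sketch of what the pax support means in practice (the path below is made up), an entry whose name does not fit the 100-byte USTAR field is written as a `PaxHeader` entry followed by the real entry:
+
+``` js
+var tar = require('tar-stream')
+var pack = tar.pack()
+
+// a single path segment longer than 100 bytes cannot be stored in a plain
+// ustar header, so pack falls back to emitting a pax extended header first
+var longName = 'deeply/nested/' + 'x'.repeat(120) + '.txt'
+
+pack.entry({ name: longName }, 'hello world', function (err) {
+  if (err) throw err
+  pack.finalize()
+})
+
+pack.pipe(process.stdout)
+```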
+
+## Related
+
+If you want to pack/unpack directories on the file system check out [tar-fs](https://github.com/mafintosh/tar-fs) which provides file system bindings to this module.
+
+## Packing
+
+To create a pack stream use `tar.pack()` and call `pack.entry(header, [callback])` to add tar entries.
+
+``` js
+var tar = require('tar-stream')
+var pack = tar.pack() // pack is a streams2 stream
+
+// add a file called my-test.txt with the content "Hello World!"
+pack.entry({ name: 'my-test.txt' }, 'Hello World!')
+
+// add a file called my-stream-test.txt from a stream
+var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
+ // the stream was added
+ // no more entries
+ pack.finalize()
+})
+
+entry.write('hello')
+entry.write(' ')
+entry.write('world')
+entry.end()
+
+// pipe the pack stream somewhere
+pack.pipe(process.stdout)
+```
+
+## Extracting
+
+To extract a stream use `tar.extract()` and listen for `extract.on('entry', function(header, stream, next) { ... })`
+
+``` js
+var extract = tar.extract()
+
+extract.on('entry', function(header, stream, next) {
+ // header is the tar header
+ // stream is the content body (might be an empty stream)
+ // call next when you are done with this entry
+
+ stream.on('end', function() {
+ next() // ready for next entry
+ })
+
+ stream.resume() // just auto drain the stream
+})
+
+extract.on('finish', function() {
+ // all entries read
+})
+
+// pipe a tarball into the extractor, e.g. the pack stream from the packing example above
+pack.pipe(extract)
+```
+
+The tar archive is streamed sequentially, meaning you **must** drain each entry's stream as you get it, or the main extract stream will receive backpressure and stop reading.
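+
+For example, a minimal sketch that keeps the body of a single entry and drains everything else (the file name and the source stream are hypothetical):
+
+``` js
+var tar = require('tar-stream')
+var extract = tar.extract()
+
+extract.on('entry', function (header, stream, next) {
+  if (header.name !== 'wanted.txt') {
+    stream.on('end', next)
+    stream.resume() // unwanted entries still have to be drained
+    return
+  }
+
+  var chunks = []
+  stream.on('data', function (chunk) { chunks.push(chunk) })
+  stream.on('end', function () {
+    console.log(Buffer.concat(chunks).toString())
+    next()
+  })
+})
+
+someTarballStream.pipe(extract)
+```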
+
+## Headers
+
+The header object used in `entry` should contain the following properties.
+Most of these values can be found by stat'ing a file.
+
+``` js
+{
+ name: 'path/to/this/entry.txt',
+ size: 1314, // entry size. defaults to 0
+ mode: 0o644, // entry mode. defaults to 0o755 for dirs and 0o644 otherwise
+ mtime: new Date(), // last modified date for entry. defaults to now.
+ type: 'file', // type of entry. defaults to file. can be:
+ // file | link | symlink | directory | block-device
+ // character-device | fifo | contiguous-file
+ linkname: 'path', // linked file name
+ uid: 0, // uid of entry owner. defaults to 0
+ gid: 0, // gid of entry owner. defaults to 0
+ uname: 'maf', // uname of entry owner. defaults to null
+ gname: 'staff', // gname of entry owner. defaults to null
+ devmajor: 0, // device major number. defaults to 0
+ devminor: 0 // device minor number. defaults to 0
+}
+```
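+
+Since most of these values come from a stat call, here is a minimal sketch (the path is hypothetical) of filling in a header from `fs.statSync` and streaming the file body in:
+
+``` js
+var fs = require('fs')
+var tar = require('tar-stream')
+
+var pack = tar.pack()
+var filename = 'path/to/this/entry.txt'
+var stat = fs.statSync(filename)
+
+var entry = pack.entry({
+  name: filename,
+  size: stat.size,
+  mode: stat.mode,
+  mtime: stat.mtime,
+  uid: stat.uid,
+  gid: stat.gid
+}, function (err) {
+  if (err) throw err
+  pack.finalize()
+})
+
+// stream the file contents in; the number of bytes written must match size
+fs.createReadStream(filename).pipe(entry)
+pack.pipe(process.stdout)
+```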
+
+## Modifying existing tarballs
+
+Using tar-stream it is easy to rewrite paths, change modes, etc. in an existing tarball.
+
+``` js
+var extract = tar.extract()
+var pack = tar.pack()
+var path = require('path')
+
+extract.on('entry', function(header, stream, callback) {
+ // let's prefix all names with 'tmp'
+ header.name = path.join('tmp', header.name)
+ // write the new entry to the pack stream
+ stream.pipe(pack.entry(header, callback))
+})
+
+extract.on('finish', function() {
+ // all entries done - let's finalize it
+ pack.finalize()
+})
+
+// pipe the old tarball to the extractor
+oldTarballStream.pipe(extract)
+
+// pipe the new tarball to another stream
+pack.pipe(newTarballStream)
+```
+
+## Saving tarball to fs
+
+
+``` js
+var fs = require('fs')
+var tar = require('tar-stream')
+
+var pack = tar.pack() // pack is a streams2 stream
+var path = 'YourTarBall.tar'
+var yourTarball = fs.createWriteStream(path)
+
+// add a file called YourFile.txt with the content "Hello World!"
+pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
+ if (err) throw err
+ pack.finalize()
+})
+
+// pipe the pack stream to your file
+pack.pipe(yourTarball)
+
+yourTarball.on('close', function () {
+ console.log(path + ' has been written')
+ fs.stat(path, function(err, stats) {
+ if (err) throw err
+ console.log(stats)
+ console.log('Got file info successfully!')
+ })
+})
+```
+
+## Performance
+
+[See tar-fs for a performance comparison with node-tar](https://github.com/mafintosh/tar-fs/blob/master/README.md#performance)
+
+# License
+
+MIT
diff --git a/node_modules/tar-stream/extract.js b/node_modules/tar-stream/extract.js
new file mode 100644
index 0000000..6278c38
--- /dev/null
+++ b/node_modules/tar-stream/extract.js
@@ -0,0 +1,257 @@
+var util = require('util')
+var bl = require('bl')
+var headers = require('./headers')
+
+var Writable = require('readable-stream').Writable
+var PassThrough = require('readable-stream').PassThrough
+
+var noop = function () {}
+
+var overflow = function (size) {
+ size &= 511
+ return size && 512 - size
+}
+
+var emptyStream = function (self, offset) {
+ var s = new Source(self, offset)
+ s.end()
+ return s
+}
+
+var mixinPax = function (header, pax) {
+ if (pax.path) header.name = pax.path
+ if (pax.linkpath) header.linkname = pax.linkpath
+ if (pax.size) header.size = parseInt(pax.size, 10)
+ header.pax = pax
+ return header
+}
+
+var Source = function (self, offset) {
+ this._parent = self
+ this.offset = offset
+ PassThrough.call(this, { autoDestroy: false })
+}
+
+util.inherits(Source, PassThrough)
+
+Source.prototype.destroy = function (err) {
+ this._parent.destroy(err)
+}
+
+var Extract = function (opts) {
+ if (!(this instanceof Extract)) return new Extract(opts)
+ Writable.call(this, opts)
+
+ opts = opts || {}
+
+ this._offset = 0
+ this._buffer = bl()
+ this._missing = 0
+ this._partial = false
+ this._onparse = noop
+ this._header = null
+ this._stream = null
+ this._overflow = null
+ this._cb = null
+ this._locked = false
+ this._destroyed = false
+ this._pax = null
+ this._paxGlobal = null
+ this._gnuLongPath = null
+ this._gnuLongLinkPath = null
+
+ var self = this
+ var b = self._buffer
+
+ var oncontinue = function () {
+ self._continue()
+ }
+
+ var onunlock = function (err) {
+ self._locked = false
+ if (err) return self.destroy(err)
+ if (!self._stream) oncontinue()
+ }
+
+ var onstreamend = function () {
+ self._stream = null
+ var drain = overflow(self._header.size)
+ if (drain) self._parse(drain, ondrain)
+ else self._parse(512, onheader)
+ if (!self._locked) oncontinue()
+ }
+
+ var ondrain = function () {
+ self._buffer.consume(overflow(self._header.size))
+ self._parse(512, onheader)
+ oncontinue()
+ }
+
+ var onpaxglobalheader = function () {
+ var size = self._header.size
+ self._paxGlobal = headers.decodePax(b.slice(0, size))
+ b.consume(size)
+ onstreamend()
+ }
+
+ var onpaxheader = function () {
+ var size = self._header.size
+ self._pax = headers.decodePax(b.slice(0, size))
+ if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
+ b.consume(size)
+ onstreamend()
+ }
+
+ var ongnulongpath = function () {
+ var size = self._header.size
+ this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
+ b.consume(size)
+ onstreamend()
+ }
+
+ var ongnulonglinkpath = function () {
+ var size = self._header.size
+ this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
+ b.consume(size)
+ onstreamend()
+ }
+
+ var onheader = function () {
+ var offset = self._offset
+ var header
+ try {
+ header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding)
+ } catch (err) {
+ self.emit('error', err)
+ }
+ b.consume(512)
+
+ if (!header) {
+ self._parse(512, onheader)
+ oncontinue()
+ return
+ }
+ if (header.type === 'gnu-long-path') {
+ self._parse(header.size, ongnulongpath)
+ oncontinue()
+ return
+ }
+ if (header.type === 'gnu-long-link-path') {
+ self._parse(header.size, ongnulonglinkpath)
+ oncontinue()
+ return
+ }
+ if (header.type === 'pax-global-header') {
+ self._parse(header.size, onpaxglobalheader)
+ oncontinue()
+ return
+ }
+ if (header.type === 'pax-header') {
+ self._parse(header.size, onpaxheader)
+ oncontinue()
+ return
+ }
+
+ if (self._gnuLongPath) {
+ header.name = self._gnuLongPath
+ self._gnuLongPath = null
+ }
+
+ if (self._gnuLongLinkPath) {
+ header.linkname = self._gnuLongLinkPath
+ self._gnuLongLinkPath = null
+ }
+
+ if (self._pax) {
+ self._header = header = mixinPax(header, self._pax)
+ self._pax = null
+ }
+
+ self._locked = true
+
+ if (!header.size || header.type === 'directory') {
+ self._parse(512, onheader)
+ self.emit('entry', header, emptyStream(self, offset), onunlock)
+ return
+ }
+
+ self._stream = new Source(self, offset)
+
+ self.emit('entry', header, self._stream, onunlock)
+ self._parse(header.size, onstreamend)
+ oncontinue()
+ }
+
+ this._onheader = onheader
+ this._parse(512, onheader)
+}
+
+util.inherits(Extract, Writable)
+
+Extract.prototype.destroy = function (err) {
+ if (this._destroyed) return
+ this._destroyed = true
+
+ if (err) this.emit('error', err)
+ this.emit('close')
+ if (this._stream) this._stream.emit('close')
+}
+
+Extract.prototype._parse = function (size, onparse) {
+ if (this._destroyed) return
+ this._offset += size
+ this._missing = size
+ if (onparse === this._onheader) this._partial = false
+ this._onparse = onparse
+}
+
+Extract.prototype._continue = function () {
+ if (this._destroyed) return
+ var cb = this._cb
+ this._cb = noop
+ if (this._overflow) this._write(this._overflow, undefined, cb)
+ else cb()
+}
+
+Extract.prototype._write = function (data, enc, cb) {
+ if (this._destroyed) return
+
+ var s = this._stream
+ var b = this._buffer
+ var missing = this._missing
+ if (data.length) this._partial = true
+
+ // we do not reach end-of-chunk now. just forward it
+
+ if (data.length < missing) {
+ this._missing -= data.length
+ this._overflow = null
+ if (s) return s.write(data, cb)
+ b.append(data)
+ return cb()
+ }
+
+ // end-of-chunk. the parser should call cb.
+
+ this._cb = cb
+ this._missing = 0
+
+ var overflow = null
+ if (data.length > missing) {
+ overflow = data.slice(missing)
+ data = data.slice(0, missing)
+ }
+
+ if (s) s.end(data)
+ else b.append(data)
+
+ this._overflow = overflow
+ this._onparse()
+}
+
+Extract.prototype._final = function (cb) {
+ if (this._partial) return this.destroy(new Error('Unexpected end of data'))
+ cb()
+}
+
+module.exports = Extract
diff --git a/node_modules/tar-stream/headers.js b/node_modules/tar-stream/headers.js
new file mode 100644
index 0000000..2787cfd
--- /dev/null
+++ b/node_modules/tar-stream/headers.js
@@ -0,0 +1,293 @@
+var alloc = Buffer.alloc
+
+var ZEROS = '0000000000000000000'
+var SEVENS = '7777777777777777777'
+var ZERO_OFFSET = '0'.charCodeAt(0)
+var USTAR_MAGIC = Buffer.from('ustar\x00', 'binary')
+var USTAR_VER = Buffer.from('00', 'binary')
+var GNU_MAGIC = Buffer.from('ustar\x20', 'binary')
+var GNU_VER = Buffer.from('\x20\x00', 'binary')
+var MASK = parseInt('7777', 8)
+var MAGIC_OFFSET = 257
+var VERSION_OFFSET = 263
+
+var clamp = function (index, len, defaultValue) {
+ if (typeof index !== 'number') return defaultValue
+ index = ~~index // Coerce to integer.
+ if (index >= len) return len
+ if (index >= 0) return index
+ index += len
+ if (index >= 0) return index
+ return 0
+}
+
+var toType = function (flag) {
+ switch (flag) {
+ case 0:
+ return 'file'
+ case 1:
+ return 'link'
+ case 2:
+ return 'symlink'
+ case 3:
+ return 'character-device'
+ case 4:
+ return 'block-device'
+ case 5:
+ return 'directory'
+ case 6:
+ return 'fifo'
+ case 7:
+ return 'contiguous-file'
+ case 72:
+ return 'pax-header'
+ case 55:
+ return 'pax-global-header'
+ case 27:
+ return 'gnu-long-link-path'
+ case 28:
+ case 30:
+ return 'gnu-long-path'
+ }
+
+ return null
+}
+
+var toTypeflag = function (flag) {
+ switch (flag) {
+ case 'file':
+ return 0
+ case 'link':
+ return 1
+ case 'symlink':
+ return 2
+ case 'character-device':
+ return 3
+ case 'block-device':
+ return 4
+ case 'directory':
+ return 5
+ case 'fifo':
+ return 6
+ case 'contiguous-file':
+ return 7
+ case 'pax-header':
+ return 72
+ }
+
+ return 0
+}
+
+var indexOf = function (block, num, offset, end) {
+ for (; offset < end; offset++) {
+ if (block[offset] === num) return offset
+ }
+ return end
+}
+
+var cksum = function (block) {
+ var sum = 8 * 32
+ for (var i = 0; i < 148; i++) sum += block[i]
+ for (var j = 156; j < 512; j++) sum += block[j]
+ return sum
+}
+
+var encodeOct = function (val, n) {
+ val = val.toString(8)
+ if (val.length > n) return SEVENS.slice(0, n) + ' '
+ else return ZEROS.slice(0, n - val.length) + val + ' '
+}
+
+/* Copied from the node-tar repo and modified to meet
+ * tar-stream coding standard.
+ *
+ * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
+ */
+function parse256 (buf) {
+ // first byte MUST be either 80 or FF
+ // 80 for positive, FF for 2's comp
+ var positive
+ if (buf[0] === 0x80) positive = true
+ else if (buf[0] === 0xFF) positive = false
+ else return null
+
+ // build up a base-256 tuple from the least sig to the highest
+ var tuple = []
+ for (var i = buf.length - 1; i > 0; i--) {
+ var byte = buf[i]
+ if (positive) tuple.push(byte)
+ else tuple.push(0xFF - byte)
+ }
+
+ var sum = 0
+ var l = tuple.length
+ for (i = 0; i < l; i++) {
+ sum += tuple[i] * Math.pow(256, i)
+ }
+
+ return positive ? sum : -1 * sum
+}
+
+var decodeOct = function (val, offset, length) {
+ val = val.slice(offset, offset + length)
+ offset = 0
+
+ // If prefixed with 0x80 then parse as a base-256 integer
+ if (val[offset] & 0x80) {
+ return parse256(val)
+ } else {
+ // Older versions of tar can prefix with spaces
+ while (offset < val.length && val[offset] === 32) offset++
+ var end = clamp(indexOf(val, 32, offset, val.length), val.length, val.length)
+ while (offset < end && val[offset] === 0) offset++
+ if (end === offset) return 0
+ return parseInt(val.slice(offset, end).toString(), 8)
+ }
+}
+
+var decodeStr = function (val, offset, length, encoding) {
+ return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
+}
+
+var addLength = function (str) {
+ var len = Buffer.byteLength(str)
+ var digits = Math.floor(Math.log(len) / Math.log(10)) + 1
+ if (len + digits >= Math.pow(10, digits)) digits++
+
+ return (len + digits) + str
+}
+
+exports.decodeLongPath = function (buf, encoding) {
+ return decodeStr(buf, 0, buf.length, encoding)
+}
+
+exports.encodePax = function (opts) { // TODO: encode more stuff in pax
+ var result = ''
+ if (opts.name) result += addLength(' path=' + opts.name + '\n')
+ if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
+ var pax = opts.pax
+ if (pax) {
+ for (var key in pax) {
+ result += addLength(' ' + key + '=' + pax[key] + '\n')
+ }
+ }
+ return Buffer.from(result)
+}
+
+exports.decodePax = function (buf) {
+ var result = {}
+
+ while (buf.length) {
+ var i = 0
+ while (i < buf.length && buf[i] !== 32) i++
+ var len = parseInt(buf.slice(0, i).toString(), 10)
+ if (!len) return result
+
+ var b = buf.slice(i + 1, len - 1).toString()
+ var keyIndex = b.indexOf('=')
+ if (keyIndex === -1) return result
+ result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
+
+ buf = buf.slice(len)
+ }
+
+ return result
+}
+
+exports.encode = function (opts) {
+ var buf = alloc(512)
+ var name = opts.name
+ var prefix = ''
+
+ if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
+ if (Buffer.byteLength(name) !== name.length) return null // utf-8
+
+ while (Buffer.byteLength(name) > 100) {
+ var i = name.indexOf('/')
+ if (i === -1) return null
+ prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
+ name = name.slice(i + 1)
+ }
+
+ if (Buffer.byteLength(name) > 100 || Buffer.byteLength(prefix) > 155) return null
+ if (opts.linkname && Buffer.byteLength(opts.linkname) > 100) return null
+
+ buf.write(name)
+ buf.write(encodeOct(opts.mode & MASK, 6), 100)
+ buf.write(encodeOct(opts.uid, 6), 108)
+ buf.write(encodeOct(opts.gid, 6), 116)
+ buf.write(encodeOct(opts.size, 11), 124)
+ buf.write(encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
+
+ buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
+
+ if (opts.linkname) buf.write(opts.linkname, 157)
+
+ USTAR_MAGIC.copy(buf, MAGIC_OFFSET)
+ USTAR_VER.copy(buf, VERSION_OFFSET)
+ if (opts.uname) buf.write(opts.uname, 265)
+ if (opts.gname) buf.write(opts.gname, 297)
+ buf.write(encodeOct(opts.devmajor || 0, 6), 329)
+ buf.write(encodeOct(opts.devminor || 0, 6), 337)
+
+ if (prefix) buf.write(prefix, 345)
+
+ buf.write(encodeOct(cksum(buf), 6), 148)
+
+ return buf
+}
+
+exports.decode = function (buf, filenameEncoding) {
+ var typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
+
+ var name = decodeStr(buf, 0, 100, filenameEncoding)
+ var mode = decodeOct(buf, 100, 8)
+ var uid = decodeOct(buf, 108, 8)
+ var gid = decodeOct(buf, 116, 8)
+ var size = decodeOct(buf, 124, 12)
+ var mtime = decodeOct(buf, 136, 12)
+ var type = toType(typeflag)
+ var linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
+ var uname = decodeStr(buf, 265, 32)
+ var gname = decodeStr(buf, 297, 32)
+ var devmajor = decodeOct(buf, 329, 8)
+ var devminor = decodeOct(buf, 337, 8)
+
+ var c = cksum(buf)
+
+ // checksum is still initial value if header was null.
+ if (c === 8 * 32) return null
+
+ // valid checksum
+ if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
+
+ if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) {
+ // ustar (posix) format.
+ // prepend prefix, if present.
+ if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
+ } else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 &&
+ GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) {
+ // 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
+ // multi-volume tarballs.
+ } else {
+ throw new Error('Invalid tar header: unknown format.')
+ }
+
+ // to support old tar versions that use trailing / to indicate dirs
+ if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
+
+ return {
+ name,
+ mode,
+ uid,
+ gid,
+ size,
+ mtime: new Date(1000 * mtime),
+ type,
+ linkname,
+ uname,
+ gname,
+ devmajor,
+ devminor
+ }
+}
diff --git a/node_modules/tar-stream/index.js b/node_modules/tar-stream/index.js
new file mode 100644
index 0000000..6481704
--- /dev/null
+++ b/node_modules/tar-stream/index.js
@@ -0,0 +1,2 @@
+exports.extract = require('./extract')
+exports.pack = require('./pack')
diff --git a/node_modules/tar-stream/pack.js b/node_modules/tar-stream/pack.js
new file mode 100644
index 0000000..f1da3b7
--- /dev/null
+++ b/node_modules/tar-stream/pack.js
@@ -0,0 +1,255 @@
+var constants = require('fs-constants')
+var eos = require('end-of-stream')
+var inherits = require('inherits')
+var alloc = Buffer.alloc
+
+var Readable = require('readable-stream').Readable
+var Writable = require('readable-stream').Writable
+var StringDecoder = require('string_decoder').StringDecoder
+
+var headers = require('./headers')
+
+var DMODE = parseInt('755', 8)
+var FMODE = parseInt('644', 8)
+
+var END_OF_TAR = alloc(1024)
+
+var noop = function () {}
+
+var overflow = function (self, size) {
+ size &= 511
+ if (size) self.push(END_OF_TAR.slice(0, 512 - size))
+}
+
+function modeToType (mode) {
+ switch (mode & constants.S_IFMT) {
+ case constants.S_IFBLK: return 'block-device'
+ case constants.S_IFCHR: return 'character-device'
+ case constants.S_IFDIR: return 'directory'
+ case constants.S_IFIFO: return 'fifo'
+ case constants.S_IFLNK: return 'symlink'
+ }
+
+ return 'file'
+}
+
+var Sink = function (to) {
+ Writable.call(this)
+ this.written = 0
+ this._to = to
+ this._destroyed = false
+}
+
+inherits(Sink, Writable)
+
+Sink.prototype._write = function (data, enc, cb) {
+ this.written += data.length
+ if (this._to.push(data)) return cb()
+ this._to._drain = cb
+}
+
+Sink.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var LinkSink = function () {
+ Writable.call(this)
+ this.linkname = ''
+ this._decoder = new StringDecoder('utf-8')
+ this._destroyed = false
+}
+
+inherits(LinkSink, Writable)
+
+LinkSink.prototype._write = function (data, enc, cb) {
+ this.linkname += this._decoder.write(data)
+ cb()
+}
+
+LinkSink.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var Void = function () {
+ Writable.call(this)
+ this._destroyed = false
+}
+
+inherits(Void, Writable)
+
+Void.prototype._write = function (data, enc, cb) {
+ cb(new Error('No body allowed for this entry'))
+}
+
+Void.prototype.destroy = function () {
+ if (this._destroyed) return
+ this._destroyed = true
+ this.emit('close')
+}
+
+var Pack = function (opts) {
+ if (!(this instanceof Pack)) return new Pack(opts)
+ Readable.call(this, opts)
+
+ this._drain = noop
+ this._finalized = false
+ this._finalizing = false
+ this._destroyed = false
+ this._stream = null
+}
+
+inherits(Pack, Readable)
+
+Pack.prototype.entry = function (header, buffer, callback) {
+ if (this._stream) throw new Error('already piping an entry')
+ if (this._finalized || this._destroyed) return
+
+ if (typeof buffer === 'function') {
+ callback = buffer
+ buffer = null
+ }
+
+ if (!callback) callback = noop
+
+ var self = this
+
+ if (!header.size || header.type === 'symlink') header.size = 0
+ if (!header.type) header.type = modeToType(header.mode)
+ if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
+ if (!header.uid) header.uid = 0
+ if (!header.gid) header.gid = 0
+ if (!header.mtime) header.mtime = new Date()
+
+ if (typeof buffer === 'string') buffer = Buffer.from(buffer)
+ if (Buffer.isBuffer(buffer)) {
+ header.size = buffer.length
+ this._encode(header)
+ var ok = this.push(buffer)
+ overflow(self, header.size)
+ if (ok) process.nextTick(callback)
+ else this._drain = callback
+ return new Void()
+ }
+
+ if (header.type === 'symlink' && !header.linkname) {
+ var linkSink = new LinkSink()
+ eos(linkSink, function (err) {
+ if (err) { // stream was closed
+ self.destroy()
+ return callback(err)
+ }
+
+ header.linkname = linkSink.linkname
+ self._encode(header)
+ callback()
+ })
+
+ return linkSink
+ }
+
+ this._encode(header)
+
+ if (header.type !== 'file' && header.type !== 'contiguous-file') {
+ process.nextTick(callback)
+ return new Void()
+ }
+
+ var sink = new Sink(this)
+
+ this._stream = sink
+
+ eos(sink, function (err) {
+ self._stream = null
+
+ if (err) { // stream was closed
+ self.destroy()
+ return callback(err)
+ }
+
+ if (sink.written !== header.size) { // corrupting tar
+ self.destroy()
+ return callback(new Error('size mismatch'))
+ }
+
+ overflow(self, header.size)
+ if (self._finalizing) self.finalize()
+ callback()
+ })
+
+ return sink
+}
+
+Pack.prototype.finalize = function () {
+ if (this._stream) {
+ this._finalizing = true
+ return
+ }
+
+ if (this._finalized) return
+ this._finalized = true
+ this.push(END_OF_TAR)
+ this.push(null)
+}
+
+Pack.prototype.destroy = function (err) {
+ if (this._destroyed) return
+ this._destroyed = true
+
+ if (err) this.emit('error', err)
+ this.emit('close')
+ if (this._stream && this._stream.destroy) this._stream.destroy()
+}
+
+Pack.prototype._encode = function (header) {
+ if (!header.pax) {
+ var buf = headers.encode(header)
+ if (buf) {
+ this.push(buf)
+ return
+ }
+ }
+ this._encodePax(header)
+}
+
+Pack.prototype._encodePax = function (header) {
+ var paxHeader = headers.encodePax({
+ name: header.name,
+ linkname: header.linkname,
+ pax: header.pax
+ })
+
+ var newHeader = {
+ name: 'PaxHeader',
+ mode: header.mode,
+ uid: header.uid,
+ gid: header.gid,
+ size: paxHeader.length,
+ mtime: header.mtime,
+ type: 'pax-header',
+ linkname: header.linkname && 'PaxHeader',
+ uname: header.uname,
+ gname: header.gname,
+ devmajor: header.devmajor,
+ devminor: header.devminor
+ }
+
+ this.push(headers.encode(newHeader))
+ this.push(paxHeader)
+ overflow(this, paxHeader.length)
+
+ newHeader.size = header.size
+ newHeader.type = header.type
+ this.push(headers.encode(newHeader))
+}
+
+Pack.prototype._read = function (n) {
+ var drain = this._drain
+ this._drain = noop
+ drain()
+}
+
+module.exports = Pack
diff --git a/node_modules/tar-stream/package.json b/node_modules/tar-stream/package.json
new file mode 100644
index 0000000..2ee236c
--- /dev/null
+++ b/node_modules/tar-stream/package.json
@@ -0,0 +1,89 @@
+{
+ "_args": [
+ [
+ "tar-stream@2.1.4",
+ "/data/dev/Projets/FNS Electrode/Projets/FNS Electrode"
+ ]
+ ],
+ "_from": "tar-stream@2.1.4",
+ "_id": "tar-stream@2.1.4",
+ "_inBundle": false,
+ "_integrity": "sha512-o3pS2zlG4gxr67GmFYBLlq+dM8gyRGUOvsrHclSkvtVtQbjV0s/+ZE8OpICbaj8clrX3tjeHngYGP7rweaBnuw==",
+ "_location": "/tar-stream",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "tar-stream@2.1.4",
+ "name": "tar-stream",
+ "escapedName": "tar-stream",
+ "rawSpec": "2.1.4",
+ "saveSpec": null,
+ "fetchSpec": "2.1.4"
+ },
+ "_requiredBy": [
+ "/tar-fs"
+ ],
+ "_resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.1.4.tgz",
+ "_spec": "2.1.4",
+ "_where": "/data/dev/Projets/FNS Electrode/Projets/FNS Electrode",
+ "author": {
+ "name": "Mathias Buus",
+ "email": "mathiasbuus@gmail.com"
+ },
+ "bugs": {
+ "url": "https://github.com/mafintosh/tar-stream/issues"
+ },
+ "dependencies": {
+ "bl": "^4.0.3",
+ "end-of-stream": "^1.4.1",
+ "fs-constants": "^1.0.0",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1"
+ },
+ "description": "tar-stream is a streaming tar parser and generator and nothing else. It is streams2 and operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
+ "devDependencies": {
+ "concat-stream": "^2.0.0",
+ "standard": "^12.0.1",
+ "tape": "^4.9.2"
+ },
+ "directories": {
+ "test": "test"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "files": [
+ "*.js",
+ "LICENSE"
+ ],
+ "homepage": "https://github.com/mafintosh/tar-stream",
+ "keywords": [
+ "tar",
+ "tarball",
+ "parse",
+ "parser",
+ "generate",
+ "generator",
+ "stream",
+ "stream2",
+ "streams",
+ "streams2",
+ "streaming",
+ "pack",
+ "extract",
+ "modify"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "tar-stream",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/mafintosh/tar-stream.git"
+ },
+ "scripts": {
+ "test": "standard && tape test/extract.js test/pack.js",
+ "test-all": "standard && tape test/*.js"
+ },
+ "version": "2.1.4"
+}
diff --git a/node_modules/tar-stream/sandbox.js b/node_modules/tar-stream/sandbox.js
new file mode 100644
index 0000000..9b82d40
--- /dev/null
+++ b/node_modules/tar-stream/sandbox.js
@@ -0,0 +1,11 @@
+const tar = require('tar-stream')
+const fs = require('fs')
+const path = require('path')
+const pipeline = require('pump') // or require('stream').pipeline
+
+fs.createReadStream('test.tar')
+ .pipe(tar.extract())
+ .on('entry', function (header, stream, done) {
+ console.log(header.name)
+ pipeline(stream, fs.createWriteStream(path.join('/tmp', header.name)), done)
+ })