From ae187b6d75c8079da0be1dc288613bad8466fe61 Mon Sep 17 00:00:00 2001
From: RaindropsSys
Date: Tue, 24 Oct 2023 17:43:37 +0200
Subject: Initial commit

---
 desktop/node_modules/@electron/asar/LICENSE.md     |  20 ++
 desktop/node_modules/@electron/asar/README.md      | 213 ++++++++++++++++++
 desktop/node_modules/@electron/asar/bin/asar.js    |  82 +++++++
 desktop/node_modules/@electron/asar/lib/asar.js    | 229 +++++++++++++++++++
 desktop/node_modules/@electron/asar/lib/crawlfs.js |  41 ++++
 desktop/node_modules/@electron/asar/lib/disk.js    | 123 ++++++++++
 .../node_modules/@electron/asar/lib/filesystem.js  | 154 +++++++++++++
 desktop/node_modules/@electron/asar/lib/index.d.ts | 250 +++++++++++++++++++++
 .../node_modules/@electron/asar/lib/integrity.js   |  62 +++++
 desktop/node_modules/@electron/asar/lib/pickle.js  | 230 +++++++++++++++++++
 .../node_modules/@electron/asar/lib/wrapped-fs.js  |  26 +++
 desktop/node_modules/@electron/asar/package.json   |  61 +++++
 12 files changed, 1491 insertions(+)
 create mode 100644 desktop/node_modules/@electron/asar/LICENSE.md
 create mode 100644 desktop/node_modules/@electron/asar/README.md
 create mode 100755 desktop/node_modules/@electron/asar/bin/asar.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/asar.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/crawlfs.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/disk.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/filesystem.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/index.d.ts
 create mode 100644 desktop/node_modules/@electron/asar/lib/integrity.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/pickle.js
 create mode 100644 desktop/node_modules/@electron/asar/lib/wrapped-fs.js
 create mode 100644 desktop/node_modules/@electron/asar/package.json

diff --git a/desktop/node_modules/@electron/asar/LICENSE.md b/desktop/node_modules/@electron/asar/LICENSE.md
new file mode 100644
index 0000000..4d231b4
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/LICENSE.md
@@ -0,0 +1,20 @@
+Copyright (c) 2014 GitHub Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/desktop/node_modules/@electron/asar/README.md b/desktop/node_modules/@electron/asar/README.md
new file mode 100644
index 0000000..2c223d2
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/README.md
@@ -0,0 +1,213 @@
+# @electron/asar - Electron Archive
+
+[![CircleCI build status](https://circleci.com/gh/electron/asar/tree/main.svg?style=shield)](https://circleci.com/gh/electron/asar/tree/main)
+[![npm version](http://img.shields.io/npm/v/@electron/asar.svg)](https://npmjs.org/package/@electron/asar)
+
+Asar is a simple, extensible archive format. It works like `tar` in that it
+concatenates all files together without compression, while still supporting
+random access.
+
+## Features
+
+* Supports random access
+* Uses JSON to store files' information
+* Very easy to write a parser
+
+## Command line utility
+
+### Install
+
+This module requires Node 10 or later.
+
+```bash
+$ npm install --engine-strict @electron/asar
+```
+
+### Usage
+
+```bash
+$ asar --help
+
+  Usage: asar [options] [command]
+
+  Commands:
+
+    pack|p <dir> <output>
+       create asar archive
+
+    list|l <archive>
+       list files of asar archive
+
+    extract-file|ef <archive> <filename>
+       extract one file from archive
+
+    extract|e <archive> <dest>
+       extract archive
+
+  Options:
+
+    -h, --help     output usage information
+    -V, --version  output the version number
+
+```
+
+#### Excluding multiple resources from being packed
+
+Given:
+```
+    app
+(a) ├── x1
+(b) ├── x2
+(c) ├── y3
+(d) │   ├── x1
+(e) │   └── z1
+(f) │       └── x2
+(g) └── z4
+(h)     └── w1
+```
+
+Exclude: a, b
+```bash
+$ asar pack app app.asar --unpack-dir "{x1,x2}"
+```
+
+Exclude: a, b, d, f
+```bash
+$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
+```
+
+Exclude: a, b, d, f, h
+```bash
+$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
+```
+
+## Using programmatically
+
+### Example
+
+```javascript
+const asar = require('@electron/asar');
+
+const src = 'some/path/';
+const dest = 'name.asar';
+
+await asar.createPackage(src, dest);
+console.log('done.');
+```
+
+Please note that there is currently **no** error handling provided!
+
+### Transform
+You can pass in a `transform` option: a function that either returns nothing
+or a `stream.Transform`. When a stream is returned, it is applied to each file
+going into the `.asar` archive to transform it (e.g. to compress it).
+
+```javascript
+const asar = require('@electron/asar');
+
+const src = 'some/path/';
+const dest = 'name.asar';
+
+function transform (filename) {
+  return new CustomTransformStream()
+}
+
+await asar.createPackageWithOptions(src, dest, { transform: transform });
+console.log('done.');
+```
+
+## Using with grunt
+
+There is also an unofficial grunt plugin to generate asar archives at [bwin/grunt-asar][grunt-asar].
+
+## Format
+
+Asar uses [Pickle][pickle] to safely serialize binary values to a file.
+
+The format of asar is very flat:
+
+```
+| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
+```
+
+The `header_size` and `header` are serialized with the [Pickle][pickle] class,
+and `header_size`'s [Pickle][pickle] object is 8 bytes.
+
+The `header` is a JSON string, and the `header_size` is the size of `header`'s
+`Pickle` object.
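+
+For illustration only, here is a minimal sketch of reading both Pickle objects
+by hand in Node.js. The `readAsarHeader` helper below is hypothetical (it is
+not part of this module's API) and assumes a well-formed archive:
+
+```javascript
+const fs = require('fs');
+
+function readAsarHeader (archivePath) {
+  const fd = fs.openSync(archivePath, 'r');
+  try {
+    // The first 8 bytes are header_size's Pickle object:
+    // 4 bytes of Pickle payload size, then the UInt32 value itself.
+    const sizeBuf = Buffer.alloc(8);
+    fs.readSync(fd, sizeBuf, 0, 8, 0);
+    const headerSize = sizeBuf.readUInt32LE(4);
+
+    // The next header_size bytes are header's Pickle object:
+    // 4 bytes of payload size, 4 bytes of string length, then the JSON string.
+    const headerBuf = Buffer.alloc(headerSize);
+    fs.readSync(fd, headerBuf, 0, headerSize, 8);
+    const jsonLength = headerBuf.readUInt32LE(4);
+    return JSON.parse(headerBuf.toString('utf8', 8, 8 + jsonLength));
+  } finally {
+    fs.closeSync(fd);
+  }
+}
+```
+
+The library's own implementation of this, with error handling, is
+`readArchiveHeaderSync` in `lib/disk.js` below.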
+
+Structure of `header` is something like this:
+
+```json
+{
+  "files": {
+    "tmp": {
+      "files": {}
+    },
+    "usr": {
+      "files": {
+        "bin": {
+          "files": {
+            "ls": {
+              "offset": "0",
+              "size": 100,
+              "executable": true,
+              "integrity": {
+                "algorithm": "SHA256",
+                "hash": "...",
+                "blockSize": 1024,
+                "blocks": ["...", "..."]
+              }
+            },
+            "cd": {
+              "offset": "100",
+              "size": 100,
+              "executable": true,
+              "integrity": {
+                "algorithm": "SHA256",
+                "hash": "...",
+                "blockSize": 1024,
+                "blocks": ["...", "..."]
+              }
+            }
+          }
+        }
+      }
+    },
+    "etc": {
+      "files": {
+        "hosts": {
+          "offset": "200",
+          "size": 32,
+          "integrity": {
+            "algorithm": "SHA256",
+            "hash": "...",
+            "blockSize": 1024,
+            "blocks": ["...", "..."]
+          }
+        }
+      }
+    }
+  }
+}
+```
+
+`offset` and `size` record the information needed to read the file from the
+archive. `offset` starts from 0, so you have to manually add the size of
+`header_size` and `header` to the `offset` to get the real offset of the file.
+
+`offset` is a UINT64 number represented as a string, because there is no way to
+precisely represent UINT64 in JavaScript's `Number`. `size` is a JavaScript
+`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
+`9007199254740991` and is about 8PB in size. We didn't store `size` as UINT64
+because file size in Node.js is represented as `Number` and it is not safe to
+convert `Number` to UINT64.
+
+`integrity` is an object consisting of a few keys:
+* A hashing `algorithm`; currently only `SHA256` is supported.
+* A hex encoded `hash` value representing the hash of the entire file.
+* An array of hex encoded hashes for the `blocks` of the file. I.e. for a `blockSize` of 4KB, this array contains the hash of every block you get if you split the file into N 4KB blocks.
+* An integer value `blockSize` representing the size in bytes of each block in the `blocks` hashes above.
+
+[pickle]: https://chromium.googlesource.com/chromium/src/+/main/base/pickle.h
+[grunt-asar]: https://github.com/bwin/grunt-asar
diff --git a/desktop/node_modules/@electron/asar/bin/asar.js b/desktop/node_modules/@electron/asar/bin/asar.js
new file mode 100755
index 0000000..c3d094e
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/bin/asar.js
@@ -0,0 +1,82 @@
+#!/usr/bin/env node
+
+var packageJSON = require('../package.json')
+var splitVersion = function (version) { return version.split('.').map(function (part) { return Number(part) }) }
+var requiredNodeVersion = splitVersion(packageJSON.engines.node.slice(2))
+var actualNodeVersion = splitVersion(process.versions.node)
+
+if (actualNodeVersion[0] < requiredNodeVersion[0] || (actualNodeVersion[0] === requiredNodeVersion[0] && actualNodeVersion[1] < requiredNodeVersion[1])) {
+  console.error('CANNOT RUN WITH NODE ' + process.versions.node)
+  console.error('asar requires Node ' + packageJSON.engines.node + '.')
+  process.exit(1)
+}
+
+// Not consts so that this file can load in Node < 4.0
+var asar = require('../lib/asar')
+var program = require('commander')
+
+program.version('v' + packageJSON.version)
+  .description('Manipulate asar archive files')
+
+program.command('pack <dir> <output>')
+  .alias('p')
+  .description('create asar archive')
+  .option('--ordering <file path>', 'path to a text file for ordering contents')
+  .option('--unpack <expression>', 'do not pack files matching glob <expression>')
+  .option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
+  .option('--exclude-hidden', 'exclude hidden files')
+  .action(function (dir, output, options) {
+    options = {
+      unpack: options.unpack,
+      unpackDir: options.unpackDir,
+      ordering: options.ordering,
+      version: options.sv,
+      arch: options.sa,
+      builddir: options.sb,
+      dot: !options.excludeHidden
+    }
+    asar.createPackageWithOptions(dir, output, options).catch(error => {
+      console.error(error)
+      process.exit(1)
+    })
+  })
+
+program.command('list <archive>')
+  .alias('l')
+  .description('list files of asar archive')
+  .option('-i, --is-pack', 'each file in the asar is pack or unpack')
+  .action(function (archive, options) {
+    options = {
+      isPack: options.isPack
+    }
+    var files = asar.listPackage(archive, options)
+    for (var i in files) {
+      console.log(files[i])
+    }
+  })
+
+program.command('extract-file <archive> <filename>')
+  .alias('ef')
+  .description('extract one file from archive')
+  .action(function (archive, filename) {
+    require('fs').writeFileSync(require('path').basename(filename),
+      asar.extractFile(archive, filename))
+  })
+
+program.command('extract <archive> <dest>')
+  .alias('e')
+  .description('extract archive')
+  .action(function (archive, dest) {
+    asar.extractAll(archive, dest)
+  })
+
+program.command('*')
+  .action(function (_cmd, args) {
+    console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', args[0])
+  })
+
+program.parse(process.argv)
+
+if (program.args.length === 0) {
+  program.help()
+}
diff --git a/desktop/node_modules/@electron/asar/lib/asar.js b/desktop/node_modules/@electron/asar/lib/asar.js
new file mode 100644
index 0000000..a0c4563
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/asar.js
@@ -0,0 +1,229 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const minimatch = require('minimatch')
+
+const Filesystem = require('./filesystem')
+const disk = require('./disk')
+const crawlFilesystem = require('./crawlfs')
+
+/**
+ * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
+ *
+ * @param {string} dirPath - directory path to check
+ * @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
+ * @param {array} unpackDirs - Array of directory paths previously marked as unpacked
+ */
+function isUnpackedDir (dirPath, pattern, unpackDirs) {
+  if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
+    if (!unpackDirs.includes(dirPath)) {
+      unpackDirs.push(dirPath)
+    }
+    return true
+  } else {
+    return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
+  }
+}
+
+module.exports.createPackage = async function (src, dest) {
+  return module.exports.createPackageWithOptions(src, dest, {})
+}
+
+module.exports.createPackageWithOptions = async function (src, dest, options) {
+  const globOptions = options.globOptions ? options.globOptions : {}
+  globOptions.dot = options.dot === undefined ? true : options.dot
+
+  const pattern = src + (options.pattern ? options.pattern : '/**/*')
+
+  const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
+  return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
+}
+
+/**
+ * Create an ASAR archive from a list of filenames.
+ *
+ * @param {string} src: Base path. All files are relative to this.
+ * @param {string} dest: Archive filename (& path).
+ * @param {array} filenames: List of filenames relative to src.
+ * @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
+ * @param {object} options: Options passed to `createPackageWithOptions`.
+*/ +module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) { + if (typeof metadata === 'undefined' || metadata === null) { metadata = {} } + if (typeof options === 'undefined' || options === null) { options = {} } + + src = path.normalize(src) + dest = path.normalize(dest) + filenames = filenames.map(function (filename) { return path.normalize(filename) }) + + const filesystem = new Filesystem(src) + const files = [] + const unpackDirs = [] + + let filenamesSorted = [] + if (options.ordering) { + const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => { + if (line.includes(':')) { line = line.split(':').pop() } + line = line.trim() + if (line.startsWith('/')) { line = line.slice(1) } + return line + }) + + const ordering = [] + for (const file of orderingFiles) { + const pathComponents = file.split(path.sep) + let str = src + for (const pathComponent of pathComponents) { + str = path.join(str, pathComponent) + ordering.push(str) + } + } + + let missing = 0 + const total = filenames.length + + for (const file of ordering) { + if (!filenamesSorted.includes(file) && filenames.includes(file)) { + filenamesSorted.push(file) + } + } + + for (const file of filenames) { + if (!filenamesSorted.includes(file)) { + filenamesSorted.push(file) + missing += 1 + } + } + + console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`) + } else { + filenamesSorted = filenames + } + + const handleFile = async function (filename) { + if (!metadata[filename]) { + metadata[filename] = await crawlFilesystem.determineFileType(filename) + } + const file = metadata[filename] + + let shouldUnpack + switch (file.type) { + case 'directory': + if (options.unpackDir) { + shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs) + } else { + shouldUnpack = false + } + filesystem.insertDirectory(filename, shouldUnpack) + break + case 'file': + shouldUnpack = false + if (options.unpack) { + shouldUnpack = minimatch(filename, options.unpack, { matchBase: true }) + } + if (!shouldUnpack && options.unpackDir) { + const dirName = path.relative(src, path.dirname(filename)) + shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs) + } + files.push({ filename: filename, unpack: shouldUnpack }) + return filesystem.insertFile(filename, shouldUnpack, file, options) + case 'link': + filesystem.insertLink(filename) + break + } + return Promise.resolve() + } + + const insertsDone = async function () { + await fs.mkdirp(path.dirname(dest)) + return disk.writeFilesystem(dest, filesystem, files, metadata) + } + + const names = filenamesSorted.slice() + + const next = async function (name) { + if (!name) { return insertsDone() } + + await handleFile(name) + return next(names.shift()) + } + + return next(names.shift()) +} + +module.exports.statFile = function (archive, filename, followLinks) { + const filesystem = disk.readFilesystemSync(archive) + return filesystem.getFile(filename, followLinks) +} + +module.exports.getRawHeader = function (archive) { + return disk.readArchiveHeaderSync(archive) +} + +module.exports.listPackage = function (archive, options) { + return disk.readFilesystemSync(archive).listFiles(options) +} + +module.exports.extractFile = function (archive, filename) { + const filesystem = disk.readFilesystemSync(archive) + return disk.readFileSync(filesystem, filename, filesystem.getFile(filename)) +} + +module.exports.extractAll = function (archive, dest) { + const filesystem = 
disk.readFilesystemSync(archive)
+  const filenames = filesystem.listFiles()
+
+  // under windows just extract links as regular files
+  const followLinks = process.platform === 'win32'
+
+  // create destination directory
+  fs.mkdirpSync(dest)
+
+  const extractionErrors = []
+  for (const fullPath of filenames) {
+    // Remove leading slash
+    const filename = fullPath.substr(1)
+    const destFilename = path.join(dest, filename)
+    const file = filesystem.getFile(filename, followLinks)
+    if (file.files) {
+      // it's a directory, create it and continue with the next entry
+      fs.mkdirpSync(destFilename)
+    } else if (file.link) {
+      // it's a symlink, create a symlink
+      const linkSrcPath = path.dirname(path.join(dest, file.link))
+      const linkDestPath = path.dirname(destFilename)
+      const relativePath = path.relative(linkDestPath, linkSrcPath)
+      // try to delete output file, because we can't overwrite a link
+      try {
+        fs.unlinkSync(destFilename)
+      } catch {}
+      const linkTo = path.join(relativePath, path.basename(file.link))
+      fs.symlinkSync(linkTo, destFilename)
+    } else {
+      // it's a file, try to extract it
+      try {
+        const content = disk.readFileSync(filesystem, filename, file)
+        fs.writeFileSync(destFilename, content)
+        if (file.executable) {
+          fs.chmodSync(destFilename, '755')
+        }
+      } catch (e) {
+        extractionErrors.push(e)
+      }
+    }
+  }
+  if (extractionErrors.length) {
+    throw new Error(
+      'Unable to extract some files:\n\n' +
+      extractionErrors.map(error => error.stack).join('\n\n'))
+  }
+}
+
+module.exports.uncache = function (archive) {
+  return disk.uncacheFilesystem(archive)
+}
+
+module.exports.uncacheAll = function () {
+  disk.uncacheAll()
+}
diff --git a/desktop/node_modules/@electron/asar/lib/crawlfs.js b/desktop/node_modules/@electron/asar/lib/crawlfs.js
new file mode 100644
index 0000000..a26c3eb
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/crawlfs.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { promisify } = require('util')
+
+const fs = require('./wrapped-fs')
+const glob = promisify(require('glob'))
+
+async function determineFileType (filename) {
+  const stat = await fs.lstat(filename)
+  if (stat.isFile()) {
+    return { type: 'file', stat }
+  } else if (stat.isDirectory()) {
+    return { type: 'directory', stat }
+  } else if (stat.isSymbolicLink()) {
+    return { type: 'link', stat }
+  }
+}
+
+module.exports = async function (dir, options) {
+  const metadata = {}
+  const crawled = await glob(dir, options)
+  const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
+  const links = []
+  const filenames = results.map(([filename, type]) => {
+    if (type) {
+      metadata[filename] = type
+      if (type.type === 'link') links.push(filename)
+    }
+    return filename
+  }).filter((filename) => {
+    // Newer glob can return files inside symlinked directories, to avoid
+    // those appearing in archives we need to manually exclude them here
+    const exactLinkIndex = links.findIndex(link => filename === link)
+    return links.every((link, index) => {
+      if (index === exactLinkIndex) return true
+      return !filename.startsWith(link)
+    })
+  })
+  return [filenames, metadata]
+}
+module.exports.determineFileType = determineFileType
diff --git a/desktop/node_modules/@electron/asar/lib/disk.js b/desktop/node_modules/@electron/asar/lib/disk.js
new file mode 100644
index 0000000..ad06182
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/disk.js
@@ -0,0 +1,123 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const pickle =
require('./pickle') + +const Filesystem = require('./filesystem') +let filesystemCache = {} + +async function copyFile (dest, src, filename) { + const srcFile = path.join(src, filename) + const targetFile = path.join(dest, filename) + + const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))]) + return fs.writeFile(targetFile, content, { mode: stats.mode }) +} + +async function streamTransformedFile (originalFilename, outStream, transformed) { + return new Promise((resolve, reject) => { + const stream = fs.createReadStream(transformed ? transformed.path : originalFilename) + stream.pipe(outStream, { end: false }) + stream.on('error', reject) + stream.on('end', () => resolve()) + }) +} + +const writeFileListToStream = async function (dest, filesystem, out, list, metadata) { + for (const file of list) { + if (file.unpack) { // the file should not be packed into archive + const filename = path.relative(filesystem.src, file.filename) + await copyFile(`${dest}.unpacked`, filesystem.src, filename) + } else { + await streamTransformedFile(file.filename, out, metadata[file.filename].transformed) + } + } + return out.end() +} + +module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) { + const headerPickle = pickle.createEmpty() + headerPickle.writeString(JSON.stringify(filesystem.header)) + const headerBuf = headerPickle.toBuffer() + + const sizePickle = pickle.createEmpty() + sizePickle.writeUInt32(headerBuf.length) + const sizeBuf = sizePickle.toBuffer() + + const out = fs.createWriteStream(dest) + await new Promise((resolve, reject) => { + out.on('error', reject) + out.write(sizeBuf) + return out.write(headerBuf, () => resolve()) + }) + return writeFileListToStream(dest, filesystem, out, files, metadata) +} + +module.exports.readArchiveHeaderSync = function (archive) { + const fd = fs.openSync(archive, 'r') + let size + let headerBuf + try { + const sizeBuf = Buffer.alloc(8) + if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) { + throw new Error('Unable to read header size') + } + + const sizePickle = pickle.createFromBuffer(sizeBuf) + size = sizePickle.createIterator().readUInt32() + headerBuf = Buffer.alloc(size) + if (fs.readSync(fd, headerBuf, 0, size, null) !== size) { + throw new Error('Unable to read header') + } + } finally { + fs.closeSync(fd) + } + + const headerPickle = pickle.createFromBuffer(headerBuf) + const header = headerPickle.createIterator().readString() + return { headerString: header, header: JSON.parse(header), headerSize: size } +} + +module.exports.readFilesystemSync = function (archive) { + if (!filesystemCache[archive]) { + const header = this.readArchiveHeaderSync(archive) + const filesystem = new Filesystem(archive) + filesystem.header = header.header + filesystem.headerSize = header.headerSize + filesystemCache[archive] = filesystem + } + return filesystemCache[archive] +} + +module.exports.uncacheFilesystem = function (archive) { + if (filesystemCache[archive]) { + filesystemCache[archive] = undefined + return true + } + return false +} + +module.exports.uncacheAll = function () { + filesystemCache = {} +} + +module.exports.readFileSync = function (filesystem, filename, info) { + let buffer = Buffer.alloc(info.size) + if (info.size <= 0) { return buffer } + if (info.unpacked) { + // it's an unpacked file, copy it. 
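+    // (unpacked files are stored next to the archive, in the sibling
+    // `<src>.unpacked` directory that copyFile above writes at pack time)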
+    buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
+  } else {
+    // Node throws an exception when reading 0 bytes into a 0-size buffer,
+    // so we short-circuit the read in this case.
+    const fd = fs.openSync(filesystem.src, 'r')
+    try {
+      const offset = 8 + filesystem.headerSize + parseInt(info.offset)
+      fs.readSync(fd, buffer, 0, info.size, offset)
+    } finally {
+      fs.closeSync(fd)
+    }
+  }
+  return buffer
+}
diff --git a/desktop/node_modules/@electron/asar/lib/filesystem.js b/desktop/node_modules/@electron/asar/lib/filesystem.js
new file mode 100644
index 0000000..ce10245
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/filesystem.js
@@ -0,0 +1,154 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const os = require('os')
+const path = require('path')
+const { promisify } = require('util')
+const stream = require('stream')
+const getFileIntegrity = require('./integrity')
+
+const UINT32_MAX = 2 ** 32 - 1
+
+const pipeline = promisify(stream.pipeline)
+
+class Filesystem {
+  constructor (src) {
+    this.src = path.resolve(src)
+    this.header = { files: Object.create(null) }
+    this.offset = BigInt(0)
+  }
+
+  searchNodeFromDirectory (p) {
+    let json = this.header
+    const dirs = p.split(path.sep)
+    for (const dir of dirs) {
+      if (dir !== '.') {
+        if (!json.files[dir]) {
+          json.files[dir] = { files: Object.create(null) }
+        }
+        json = json.files[dir]
+      }
+    }
+    return json
+  }
+
+  searchNodeFromPath (p) {
+    p = path.relative(this.src, p)
+    if (!p) { return this.header }
+    const name = path.basename(p)
+    const node = this.searchNodeFromDirectory(path.dirname(p))
+    if (node.files == null) {
+      node.files = Object.create(null)
+    }
+    if (node.files[name] == null) {
+      node.files[name] = Object.create(null)
+    }
+    return node.files[name]
+  }
+
+  insertDirectory (p, shouldUnpack) {
+    const node = this.searchNodeFromPath(p)
+    if (shouldUnpack) {
+      node.unpacked = shouldUnpack
+    }
+    node.files = node.files || Object.create(null)
+    return node.files
+  }
+
+  async insertFile (p, shouldUnpack, file, options) {
+    const dirNode = this.searchNodeFromPath(path.dirname(p))
+    const node = this.searchNodeFromPath(p)
+    if (shouldUnpack || dirNode.unpacked) {
+      node.size = file.stat.size
+      node.unpacked = true
+      node.integrity = await getFileIntegrity(p)
+      return Promise.resolve()
+    }
+
+    let size
+
+    const transformed = options.transform && options.transform(p)
+    if (transformed) {
+      const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
+      const tmpfile = path.join(tmpdir, path.basename(p))
+      const out = fs.createWriteStream(tmpfile)
+      const readStream = fs.createReadStream(p)
+
+      await pipeline(readStream, transformed, out)
+      file.transformed = {
+        path: tmpfile,
+        stat: await fs.lstat(tmpfile)
+      }
+      size = file.transformed.stat.size
+    } else {
+      size = file.stat.size
+    }
+
+    // JavaScript cannot precisely represent integers >= UINT32_MAX.
+ if (size > UINT32_MAX) { + throw new Error(`${p}: file size can not be larger than 4.2GB`) + } + + node.size = size + node.offset = this.offset.toString() + node.integrity = await getFileIntegrity(p) + if (process.platform !== 'win32' && (file.stat.mode & 0o100)) { + node.executable = true + } + this.offset += BigInt(size) + } + + insertLink (p) { + const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p)) + if (link.substr(0, 2) === '..') { + throw new Error(`${p}: file "${link}" links out of the package`) + } + const node = this.searchNodeFromPath(p) + node.link = link + return link + } + + listFiles (options) { + const files = [] + + const fillFilesFromMetadata = function (basePath, metadata) { + if (!metadata.files) { + return + } + + for (const [childPath, childMetadata] of Object.entries(metadata.files)) { + const fullPath = path.join(basePath, childPath) + const packState = childMetadata.unpacked ? 'unpack' : 'pack ' + files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath) + fillFilesFromMetadata(fullPath, childMetadata) + } + } + + fillFilesFromMetadata('/', this.header) + return files + } + + getNode (p) { + const node = this.searchNodeFromDirectory(path.dirname(p)) + const name = path.basename(p) + if (name) { + return node.files[name] + } else { + return node + } + } + + getFile (p, followLinks) { + followLinks = typeof followLinks === 'undefined' ? true : followLinks + const info = this.getNode(p) + + // if followLinks is false we don't resolve symlinks + if (info.link && followLinks) { + return this.getFile(info.link) + } else { + return info + } + } +} + +module.exports = Filesystem diff --git a/desktop/node_modules/@electron/asar/lib/index.d.ts b/desktop/node_modules/@electron/asar/lib/index.d.ts new file mode 100644 index 0000000..b79528b --- /dev/null +++ b/desktop/node_modules/@electron/asar/lib/index.d.ts @@ -0,0 +1,250 @@ +import { Stats } from "fs"; + +interface IMinimatchOptions { + /** + * Dump a ton of stuff to stderr. + * + * @default false + */ + debug?: boolean | undefined; + + /** + * Do not expand `{a,b}` and `{1..3}` brace sets. + * + * @default false + */ + nobrace?: boolean | undefined; + + /** + * Disable `**` matching against multiple folder names. + * + * @default false + */ + noglobstar?: boolean | undefined; + + /** + * Allow patterns to match filenames starting with a period, + * even if the pattern does not explicitly have a period in that spot. + * + * Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set. + * + * @default false + */ + dot?: boolean | undefined; + + /** + * Disable "extglob" style patterns like `+(a|b)`. + * + * @default false + */ + noext?: boolean | undefined; + + /** + * Perform a case-insensitive match. + * + * @default false + */ + nocase?: boolean | undefined; + + /** + * When a match is not found by `minimatch.match`, + * return a list containing the pattern itself if this option is set. + * Otherwise, an empty list is returned if there are no matches. + * + * @default false + */ + nonull?: boolean | undefined; + + /** + * If set, then patterns without slashes will be matched + * against the basename of the path if it contains slashes. For example, + * `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + * + * @default false + */ + matchBase?: boolean | undefined; + + /** + * Suppress the behavior of treating `#` at the start of a pattern as a comment. 
+ * + * @default false + */ + nocomment?: boolean | undefined; + + /** + * Suppress the behavior of treating a leading `!` character as negation. + * + * @default false + */ + nonegate?: boolean | undefined; + + /** + * Returns from negate expressions the same as if they were not negated. + * (Ie, true on a hit, false on a miss.) + * + * @default false + */ + flipNegate?: boolean | undefined; + + /** + * Compare a partial path to a pattern. As long as the parts of the path that + * are present are not contradicted by the pattern, it will be treated as a + * match. This is useful in applications where you're walking through a + * folder structure, and don't yet have the full path, but want to ensure that + * you do not walk down paths that can never be a match. + * + * @default false + * + * @example + * import minimatch = require("minimatch"); + * + * minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d + * minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d + * minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a + */ + partial?: boolean; + + /** + * Use `\\` as a path separator _only_, and _never_ as an escape + * character. If set, all `\\` characters are replaced with `/` in + * the pattern. Note that this makes it **impossible** to match + * against paths containing literal glob pattern characters, but + * allows matching with patterns constructed using `path.join()` and + * `path.resolve()` on Windows platforms, mimicking the (buggy!) + * behavior of earlier versions on Windows. Please use with + * caution, and be mindful of the caveat about Windows paths + * + * For legacy reasons, this is also set if + * `options.allowWindowsEscape` is set to the exact value `false`. 
+   *
+   * @default false
+   */
+  windowsPathsNoEscape?: boolean;
+}
+
+import fs = require("fs");
+interface IGlobOptions extends IMinimatchOptions {
+  cwd?: string | undefined;
+  root?: string | undefined;
+  dot?: boolean | undefined;
+  nomount?: boolean | undefined;
+  mark?: boolean | undefined;
+  nosort?: boolean | undefined;
+  stat?: boolean | undefined;
+  silent?: boolean | undefined;
+  strict?: boolean | undefined;
+  cache?:
+    | { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray<string> }
+    | undefined;
+  statCache?:
+    | { [path: string]: false | { isDirectory(): boolean } | undefined }
+    | undefined;
+  symlinks?: { [path: string]: boolean | undefined } | undefined;
+  realpathCache?: { [path: string]: string } | undefined;
+  sync?: boolean | undefined;
+  nounique?: boolean | undefined;
+  nonull?: boolean | undefined;
+  debug?: boolean | undefined;
+  nobrace?: boolean | undefined;
+  noglobstar?: boolean | undefined;
+  noext?: boolean | undefined;
+  nocase?: boolean | undefined;
+  matchBase?: any;
+  nodir?: boolean | undefined;
+  ignore?: string | ReadonlyArray<string> | undefined;
+  follow?: boolean | undefined;
+  realpath?: boolean | undefined;
+  nonegate?: boolean | undefined;
+  nocomment?: boolean | undefined;
+  absolute?: boolean | undefined;
+  allowWindowsEscape?: boolean | undefined;
+  fs?: typeof fs;
+}
+
+export type CreateOptions = {
+  dot?: boolean;
+  globOptions?: IGlobOptions;
+  ordering?: string;
+  pattern?: string;
+  transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
+  unpack?: string;
+  unpackDir?: string;
+};
+
+export type ListOptions = {
+  isPack: boolean;
+};
+
+export type EntryMetadata = {
+  unpacked: boolean;
+};
+
+export type DirectoryMetadata = EntryMetadata & {
+  files: { [property: string]: EntryMetadata };
+};
+
+export type FileMetadata = EntryMetadata & {
+  executable?: true;
+  offset?: number;
+  size?: number;
+};
+
+export type LinkMetadata = {
+  link: string;
+};
+
+export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
+
+export type InputMetadataType = 'directory' | 'file' | 'link';
+
+export type InputMetadata = {
+  [property: string]: {
+    type: InputMetadataType;
+    stat: Stats;
+  }
+};
+
+export type DirectoryRecord = {
+  files: Record<string, DirectoryRecord | FileRecord>;
+};
+
+export type FileRecord = {
+  offset: string;
+  size: number;
+  executable?: boolean;
+  integrity: {
+    hash: string;
+    algorithm: 'SHA256';
+    blocks: string[];
+    blockSize: number;
+  };
+}
+
+export type ArchiveHeader = {
+  // The JSON parsed header string
+  header: DirectoryRecord;
+  headerString: string;
+  headerSize: number;
+}
+
+export function createPackage(src: string, dest: string): Promise<void>;
+export function createPackageWithOptions(
+  src: string,
+  dest: string,
+  options: CreateOptions
+): Promise<void>;
+export function createPackageFromFiles(
+  src: string,
+  dest: string,
+  filenames: string[],
+  metadata?: InputMetadata,
+  options?: CreateOptions
+): Promise<void>;
+
+export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
+export function getRawHeader(archive: string): ArchiveHeader;
+export function listPackage(archive: string, options?: ListOptions): string[];
+export function extractFile(archive: string, filename: string): Buffer;
+export function extractAll(archive: string, dest: string): void;
+export function uncache(archive: string): boolean;
+export function uncacheAll(): void;
diff --git a/desktop/node_modules/@electron/asar/lib/integrity.js b/desktop/node_modules/@electron/asar/lib/integrity.js
new file mode 100644
index 0000000..6fabee4
--- /dev/null +++ b/desktop/node_modules/@electron/asar/lib/integrity.js @@ -0,0 +1,62 @@ +const crypto = require('crypto') +const fs = require('fs') +const stream = require('stream') +const { promisify } = require('util') + +const ALGORITHM = 'SHA256' +// 4MB default block size +const BLOCK_SIZE = 4 * 1024 * 1024 + +const pipeline = promisify(stream.pipeline) + +function hashBlock (block) { + return crypto.createHash(ALGORITHM).update(block).digest('hex') +} + +async function getFileIntegrity (path) { + const fileHash = crypto.createHash(ALGORITHM) + + const blocks = [] + let currentBlockSize = 0 + let currentBlock = [] + + await pipeline( + fs.createReadStream(path), + new stream.PassThrough({ + decodeStrings: false, + transform (_chunk, encoding, callback) { + fileHash.update(_chunk) + + function handleChunk (chunk) { + const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength) + currentBlockSize += diffToSlice + currentBlock.push(chunk.slice(0, diffToSlice)) + if (currentBlockSize === BLOCK_SIZE) { + blocks.push(hashBlock(Buffer.concat(currentBlock))) + currentBlock = [] + currentBlockSize = 0 + } + if (diffToSlice < chunk.byteLength) { + handleChunk(chunk.slice(diffToSlice)) + } + } + handleChunk(_chunk) + callback() + }, + flush (callback) { + blocks.push(hashBlock(Buffer.concat(currentBlock))) + currentBlock = [] + callback() + } + }) + ) + + return { + algorithm: ALGORITHM, + hash: fileHash.digest('hex'), + blockSize: BLOCK_SIZE, + blocks: blocks + } +} + +module.exports = getFileIntegrity diff --git a/desktop/node_modules/@electron/asar/lib/pickle.js b/desktop/node_modules/@electron/asar/lib/pickle.js new file mode 100644 index 0000000..6943655 --- /dev/null +++ b/desktop/node_modules/@electron/asar/lib/pickle.js @@ -0,0 +1,230 @@ +// sizeof(T). +const SIZE_INT32 = 4 +const SIZE_UINT32 = 4 +const SIZE_INT64 = 8 +const SIZE_UINT64 = 8 +const SIZE_FLOAT = 4 +const SIZE_DOUBLE = 8 + +// The allocation granularity of the payload. +const PAYLOAD_UNIT = 64 + +// Largest JS number. +const CAPACITY_READ_ONLY = 9007199254740992 + +// Aligns 'i' by rounding it up to the next multiple of 'alignment'. +const alignInt = function (i, alignment) { + return i + (alignment - (i % alignment)) % alignment +} + +// PickleIterator reads data from a Pickle. The Pickle object must remain valid +// while the PickleIterator object is in use. 
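+//
+// For example, this is how disk.js reads an archive header back:
+//   pickle.createFromBuffer(sizeBuf).createIterator().readUInt32()
+//   pickle.createFromBuffer(headerBuf).createIterator().readString()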
+const PickleIterator = (function () { + function PickleIterator (pickle) { + this.payload = pickle.header + this.payloadOffset = pickle.headerSize + this.readIndex = 0 + this.endIndex = pickle.getPayloadSize() + } + + PickleIterator.prototype.readBool = function () { + return this.readInt() !== 0 + } + + PickleIterator.prototype.readInt = function () { + return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE) + } + + PickleIterator.prototype.readUInt32 = function () { + return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE) + } + + PickleIterator.prototype.readInt64 = function () { + return this.readBytes(SIZE_INT64, Buffer.prototype.readInt64LE) + } + + PickleIterator.prototype.readUInt64 = function () { + return this.readBytes(SIZE_UINT64, Buffer.prototype.readUInt64LE) + } + + PickleIterator.prototype.readFloat = function () { + return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE) + } + + PickleIterator.prototype.readDouble = function () { + return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE) + } + + PickleIterator.prototype.readString = function () { + return this.readBytes(this.readInt()).toString() + } + + PickleIterator.prototype.readBytes = function (length, method) { + const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length) + if (method != null) { + return method.call(this.payload, readPayloadOffset, length) + } else { + return this.payload.slice(readPayloadOffset, readPayloadOffset + length) + } + } + + PickleIterator.prototype.getReadPayloadOffsetAndAdvance = function (length) { + if (length > this.endIndex - this.readIndex) { + this.readIndex = this.endIndex + throw new Error('Failed to read data with length of ' + length) + } + const readPayloadOffset = this.payloadOffset + this.readIndex + this.advance(length) + return readPayloadOffset + } + + PickleIterator.prototype.advance = function (size) { + const alignedSize = alignInt(size, SIZE_UINT32) + if (this.endIndex - this.readIndex < alignedSize) { + this.readIndex = this.endIndex + } else { + this.readIndex += alignedSize + } + } + + return PickleIterator +})() + +// This class provides facilities for basic binary value packing and unpacking. +// +// The Pickle class supports appending primitive values (ints, strings, etc.) +// to a pickle instance. The Pickle instance grows its internal memory buffer +// dynamically to hold the sequence of primitive values. The internal memory +// buffer is exposed as the "data" of the Pickle. This "data" can be passed +// to a Pickle object to initialize it for reading. +// +// When reading from a Pickle object, it is important for the consumer to know +// what value types to read and in what order to read them as the Pickle does +// not keep track of the type of data written to it. +// +// The Pickle's data has a header which contains the size of the Pickle's +// payload. It can optionally support additional space in the header. That +// space is controlled by the header_size parameter passed to the Pickle +// constructor. 
+const Pickle = (function () { + function Pickle (buffer) { + if (buffer) { + this.initFromBuffer(buffer) + } else { + this.initEmpty() + } + } + + Pickle.prototype.initEmpty = function () { + this.header = Buffer.alloc(0) + this.headerSize = SIZE_UINT32 + this.capacityAfterHeader = 0 + this.writeOffset = 0 + this.resize(PAYLOAD_UNIT) + this.setPayloadSize(0) + } + + Pickle.prototype.initFromBuffer = function (buffer) { + this.header = buffer + this.headerSize = buffer.length - this.getPayloadSize() + this.capacityAfterHeader = CAPACITY_READ_ONLY + this.writeOffset = 0 + if (this.headerSize > buffer.length) { + this.headerSize = 0 + } + if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) { + this.headerSize = 0 + } + if (this.headerSize === 0) { + this.header = Buffer.alloc(0) + } + } + + Pickle.prototype.createIterator = function () { + return new PickleIterator(this) + } + + Pickle.prototype.toBuffer = function () { + return this.header.slice(0, this.headerSize + this.getPayloadSize()) + } + + Pickle.prototype.writeBool = function (value) { + return this.writeInt(value ? 1 : 0) + } + + Pickle.prototype.writeInt = function (value) { + return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE) + } + + Pickle.prototype.writeUInt32 = function (value) { + return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE) + } + + Pickle.prototype.writeInt64 = function (value) { + return this.writeBytes(value, SIZE_INT64, Buffer.prototype.writeInt64LE) + } + + Pickle.prototype.writeUInt64 = function (value) { + return this.writeBytes(value, SIZE_UINT64, Buffer.prototype.writeUInt64LE) + } + + Pickle.prototype.writeFloat = function (value) { + return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE) + } + + Pickle.prototype.writeDouble = function (value) { + return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE) + } + + Pickle.prototype.writeString = function (value) { + const length = Buffer.byteLength(value, 'utf8') + if (!this.writeInt(length)) { + return false + } + return this.writeBytes(value, length) + } + + Pickle.prototype.setPayloadSize = function (payloadSize) { + return this.header.writeUInt32LE(payloadSize, 0) + } + + Pickle.prototype.getPayloadSize = function () { + return this.header.readUInt32LE(0) + } + + Pickle.prototype.writeBytes = function (data, length, method) { + const dataLength = alignInt(length, SIZE_UINT32) + const newSize = this.writeOffset + dataLength + if (newSize > this.capacityAfterHeader) { + this.resize(Math.max(this.capacityAfterHeader * 2, newSize)) + } + if (method != null) { + method.call(this.header, data, this.headerSize + this.writeOffset) + } else { + this.header.write(data, this.headerSize + this.writeOffset, length) + } + const endOffset = this.headerSize + this.writeOffset + length + this.header.fill(0, endOffset, endOffset + dataLength - length) + this.setPayloadSize(newSize) + this.writeOffset = newSize + return true + } + + Pickle.prototype.resize = function (newCapacity) { + newCapacity = alignInt(newCapacity, PAYLOAD_UNIT) + this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]) + this.capacityAfterHeader = newCapacity + } + + return Pickle +})() + +module.exports = { + createEmpty: function () { + return new Pickle() + }, + + createFromBuffer: function (buffer) { + return new Pickle(buffer) + } +} diff --git a/desktop/node_modules/@electron/asar/lib/wrapped-fs.js b/desktop/node_modules/@electron/asar/lib/wrapped-fs.js new file mode 100644 index 
0000000..24f59d0
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/wrapped-fs.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const fs = process.versions.electron ? require('original-fs') : require('fs')
+
+const promisifiedMethods = [
+  'lstat',
+  'mkdtemp',
+  'readFile',
+  'stat',
+  'writeFile'
+]
+
+const promisified = {}
+
+for (const method of Object.keys(fs)) {
+  if (promisifiedMethods.includes(method)) {
+    promisified[method] = fs.promises[method]
+  } else {
+    promisified[method] = fs[method]
+  }
+}
+// To make it more like fs-extra
+promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
+promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
+
+module.exports = promisified
diff --git a/desktop/node_modules/@electron/asar/package.json b/desktop/node_modules/@electron/asar/package.json
new file mode 100644
index 0000000..66b75d1
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "@electron/asar",
+  "description": "Creating Electron app packages",
+  "version": "3.2.7",
+  "main": "./lib/asar.js",
+  "types": "./lib/index.d.ts",
+  "bin": {
+    "asar": "./bin/asar.js"
+  },
+  "files": [
+    "bin",
+    "lib",
+    "lib/index.d.ts"
+  ],
+  "engines": {
+    "node": ">=10.12.0"
+  },
+  "license": "MIT",
+  "homepage": "https://github.com/electron/asar",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/electron/asar.git"
+  },
+  "bugs": {
+    "url": "https://github.com/electron/asar/issues"
+  },
+  "scripts": {
+    "mocha": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec",
+    "test": "npm run lint && npm run mocha",
+    "lint": "tsd && standard",
+    "standard": "standard",
+    "tsd": "tsd"
+  },
+  "standard": {
+    "env": {
+      "mocha": true
+    },
+    "globals": [
+      "BigInt"
+    ]
+  },
+  "tsd": {
+    "directory": "test"
+  },
+  "dependencies": {
+    "commander": "^5.0.0",
+    "glob": "^7.1.6",
+    "minimatch": "^3.0.4"
+  },
+  "devDependencies": {
+    "@continuous-auth/semantic-release-npm": "^3.0.0",
+    "electron": "^22.0.0",
+    "electron-mocha": "^11.0.2",
+    "lodash": "^4.17.15",
+    "mocha": "^10.1.0",
+    "rimraf": "^3.0.2",
+    "standard": "^14.3.3",
+    "tsd": "^0.25.0",
+    "xvfb-maybe": "^0.2.1"
+  }
+}