Diffstat (limited to 'desktop/node_modules/@electron/asar/lib')
-rw-r--r--  desktop/node_modules/@electron/asar/lib/asar.js        229
-rw-r--r--  desktop/node_modules/@electron/asar/lib/crawlfs.js      41
-rw-r--r--  desktop/node_modules/@electron/asar/lib/disk.js        123
-rw-r--r--  desktop/node_modules/@electron/asar/lib/filesystem.js  154
-rw-r--r--  desktop/node_modules/@electron/asar/lib/index.d.ts     250
-rw-r--r--  desktop/node_modules/@electron/asar/lib/integrity.js    62
-rw-r--r--  desktop/node_modules/@electron/asar/lib/pickle.js      230
-rw-r--r--  desktop/node_modules/@electron/asar/lib/wrapped-fs.js   26
8 files changed, 1115 insertions, 0 deletions
diff --git a/desktop/node_modules/@electron/asar/lib/asar.js b/desktop/node_modules/@electron/asar/lib/asar.js
new file mode 100644
index 0000000..a0c4563
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/asar.js
@@ -0,0 +1,229 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const minimatch = require('minimatch')
+
+const Filesystem = require('./filesystem')
+const disk = require('./disk')
+const crawlFilesystem = require('./crawlfs')
+
+/**
+ * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
+ *
+ * @param {string} dirPath - directory path to check
+ * @param {string} pattern - literal prefix (for backward compatibility) or glob pattern
+ * @param {array} unpackDirs - Array of directory paths previously marked as unpacked
+ */
+function isUnpackedDir (dirPath, pattern, unpackDirs) {
+ if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
+ if (!unpackDirs.includes(dirPath)) {
+ unpackDirs.push(dirPath)
+ }
+ return true
+ } else {
+ return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
+ }
+}
+
+module.exports.createPackage = async function (src, dest) {
+ return module.exports.createPackageWithOptions(src, dest, {})
+}
+
+module.exports.createPackageWithOptions = async function (src, dest, options) {
+ const globOptions = options.globOptions ? options.globOptions : {}
+ globOptions.dot = options.dot === undefined ? true : options.dot
+
+ const pattern = src + (options.pattern ? options.pattern : '/**/*')
+
+ const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
+ return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
+}
+
+/**
+ * Create an ASAR archive from a list of filenames.
+ *
+ * @param {string} src - Base path. All files are relative to this.
+ * @param {string} dest - Archive filename (& path).
+ * @param {array} filenames - List of filenames relative to src.
+ * @param {object} metadata - Object with filenames as keys and { type: 'directory' | 'file' | 'link', stat: fs.Stats } as values. (Optional)
+ * @param {object} options - Options passed to `createPackageWithOptions`.
+ */
+module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
+ if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
+ if (typeof options === 'undefined' || options === null) { options = {} }
+
+ src = path.normalize(src)
+ dest = path.normalize(dest)
+ filenames = filenames.map(function (filename) { return path.normalize(filename) })
+
+ const filesystem = new Filesystem(src)
+ const files = []
+ const unpackDirs = []
+
+ let filenamesSorted = []
+ if (options.ordering) {
+ const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
+ if (line.includes(':')) { line = line.split(':').pop() }
+ line = line.trim()
+ if (line.startsWith('/')) { line = line.slice(1) }
+ return line
+ })
+
+ const ordering = []
+ for (const file of orderingFiles) {
+ const pathComponents = file.split(path.sep)
+ let str = src
+ for (const pathComponent of pathComponents) {
+ str = path.join(str, pathComponent)
+ ordering.push(str)
+ }
+ }
+
+ let missing = 0
+ const total = filenames.length
+
+ for (const file of ordering) {
+ if (!filenamesSorted.includes(file) && filenames.includes(file)) {
+ filenamesSorted.push(file)
+ }
+ }
+
+ for (const file of filenames) {
+ if (!filenamesSorted.includes(file)) {
+ filenamesSorted.push(file)
+ missing += 1
+ }
+ }
+
+ console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
+ } else {
+ filenamesSorted = filenames
+ }
+
+ const handleFile = async function (filename) {
+ if (!metadata[filename]) {
+ metadata[filename] = await crawlFilesystem.determineFileType(filename)
+ }
+ const file = metadata[filename]
+
+ let shouldUnpack
+ switch (file.type) {
+ case 'directory':
+ if (options.unpackDir) {
+ shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
+ } else {
+ shouldUnpack = false
+ }
+ filesystem.insertDirectory(filename, shouldUnpack)
+ break
+ case 'file':
+ shouldUnpack = false
+ if (options.unpack) {
+ shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
+ }
+ if (!shouldUnpack && options.unpackDir) {
+ const dirName = path.relative(src, path.dirname(filename))
+ shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
+ }
+ files.push({ filename: filename, unpack: shouldUnpack })
+ return filesystem.insertFile(filename, shouldUnpack, file, options)
+ case 'link':
+ filesystem.insertLink(filename)
+ break
+ }
+ return Promise.resolve()
+ }
+
+ const insertsDone = async function () {
+ await fs.mkdirp(path.dirname(dest))
+ return disk.writeFilesystem(dest, filesystem, files, metadata)
+ }
+
+ const names = filenamesSorted.slice()
+
+ const next = async function (name) {
+ if (!name) { return insertsDone() }
+
+ await handleFile(name)
+ return next(names.shift())
+ }
+
+ return next(names.shift())
+}
+
+module.exports.statFile = function (archive, filename, followLinks) {
+ const filesystem = disk.readFilesystemSync(archive)
+ return filesystem.getFile(filename, followLinks)
+}
+
+module.exports.getRawHeader = function (archive) {
+ return disk.readArchiveHeaderSync(archive)
+}
+
+module.exports.listPackage = function (archive, options) {
+ return disk.readFilesystemSync(archive).listFiles(options)
+}
+
+module.exports.extractFile = function (archive, filename) {
+ const filesystem = disk.readFilesystemSync(archive)
+ return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
+}
+
+module.exports.extractAll = function (archive, dest) {
+ const filesystem = disk.readFilesystemSync(archive)
+ const filenames = filesystem.listFiles()
+
+  // on Windows, just extract links as regular files
+ const followLinks = process.platform === 'win32'
+
+ // create destination directory
+ fs.mkdirpSync(dest)
+
+ const extractionErrors = []
+ for (const fullPath of filenames) {
+ // Remove leading slash
+ const filename = fullPath.substr(1)
+ const destFilename = path.join(dest, filename)
+ const file = filesystem.getFile(filename, followLinks)
+ if (file.files) {
+ // it's a directory, create it and continue with the next entry
+ fs.mkdirpSync(destFilename)
+ } else if (file.link) {
+ // it's a symlink, create a symlink
+ const linkSrcPath = path.dirname(path.join(dest, file.link))
+ const linkDestPath = path.dirname(destFilename)
+ const relativePath = path.relative(linkDestPath, linkSrcPath)
+ // try to delete output file, because we can't overwrite a link
+ try {
+ fs.unlinkSync(destFilename)
+ } catch {}
+ const linkTo = path.join(relativePath, path.basename(file.link))
+ fs.symlinkSync(linkTo, destFilename)
+ } else {
+ // it's a file, try to extract it
+ try {
+ const content = disk.readFileSync(filesystem, filename, file)
+ fs.writeFileSync(destFilename, content)
+ if (file.executable) {
+ fs.chmodSync(destFilename, '755')
+ }
+ } catch (e) {
+ extractionErrors.push(e)
+ }
+ }
+ }
+ if (extractionErrors.length) {
+ throw new Error(
+ 'Unable to extract some files:\n\n' +
+ extractionErrors.map(error => error.stack).join('\n\n'))
+ }
+}
+
+module.exports.uncache = function (archive) {
+ return disk.uncacheFilesystem(archive)
+}
+
+module.exports.uncacheAll = function () {
+ disk.uncacheAll()
+}
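
The exports above form the package's public API. A minimal usage sketch, assuming hypothetical 'app' / 'app.asar' paths (the calls are the ones defined in asar.js and typed in index.d.ts):

// Pack a directory, inspect the archive, then extract it again.
const asar = require('@electron/asar')

async function roundTrip () {
  // Keep native modules outside the archive via the `unpack` glob.
  await asar.createPackageWithOptions('app', 'app.asar', { unpack: '*.node' })

  console.log(asar.listPackage('app.asar'))             // every path in the archive
  const main = asar.extractFile('app.asar', 'main.js')  // Buffer with file contents
  console.log(main.length)

  asar.extractAll('app.asar', 'app-out')                // synchronous full extraction
}

roundTrip().catch(console.error)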
diff --git a/desktop/node_modules/@electron/asar/lib/crawlfs.js b/desktop/node_modules/@electron/asar/lib/crawlfs.js
new file mode 100644
index 0000000..a26c3eb
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/crawlfs.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const { promisify } = require('util')
+
+const fs = require('./wrapped-fs')
+const glob = promisify(require('glob'))
+
+async function determineFileType (filename) {
+ const stat = await fs.lstat(filename)
+ if (stat.isFile()) {
+ return { type: 'file', stat }
+ } else if (stat.isDirectory()) {
+ return { type: 'directory', stat }
+ } else if (stat.isSymbolicLink()) {
+ return { type: 'link', stat }
+ }
+}
+
+module.exports = async function (dir, options) {
+ const metadata = {}
+ const crawled = await glob(dir, options)
+ const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
+ const links = []
+ const filenames = results.map(([filename, type]) => {
+ if (type) {
+ metadata[filename] = type
+ if (type.type === 'link') links.push(filename)
+ }
+ return filename
+ }).filter((filename) => {
+    // Newer versions of glob can return files inside symlinked directories; to
+    // avoid those appearing in archives we need to manually exclude them here
+ const exactLinkIndex = links.findIndex(link => filename === link)
+ return links.every((link, index) => {
+ if (index === exactLinkIndex) return true
+ return !filename.startsWith(link)
+ })
+ })
+ return [filenames, metadata]
+}
+module.exports.determineFileType = determineFileType
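
For reference, a sketch of how this crawler is consumed (the glob mirrors the default '/**/*' suffix that createPackageWithOptions appends; the 'app' directory is hypothetical):

const crawlFilesystem = require('@electron/asar/lib/crawlfs')

async function main () {
  // Resolves to [filenames, metadata]: each matched path, plus a
  // { type, stat } record per path, keyed by filename.
  const [filenames, metadata] = await crawlFilesystem('app/**/*', { dot: true })
  for (const filename of filenames) {
    console.log(metadata[filename].type, filename) // 'file' | 'directory' | 'link'
  }
}

main().catch(console.error)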
diff --git a/desktop/node_modules/@electron/asar/lib/disk.js b/desktop/node_modules/@electron/asar/lib/disk.js
new file mode 100644
index 0000000..ad06182
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/disk.js
@@ -0,0 +1,123 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const path = require('path')
+const pickle = require('./pickle')
+
+const Filesystem = require('./filesystem')
+let filesystemCache = {}
+
+async function copyFile (dest, src, filename) {
+ const srcFile = path.join(src, filename)
+ const targetFile = path.join(dest, filename)
+
+ const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
+ return fs.writeFile(targetFile, content, { mode: stats.mode })
+}
+
+async function streamTransformedFile (originalFilename, outStream, transformed) {
+ return new Promise((resolve, reject) => {
+ const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
+ stream.pipe(outStream, { end: false })
+ stream.on('error', reject)
+ stream.on('end', () => resolve())
+ })
+}
+
+const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
+ for (const file of list) {
+ if (file.unpack) { // the file should not be packed into archive
+ const filename = path.relative(filesystem.src, file.filename)
+ await copyFile(`${dest}.unpacked`, filesystem.src, filename)
+ } else {
+ await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
+ }
+ }
+ return out.end()
+}
+
+module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
+ const headerPickle = pickle.createEmpty()
+ headerPickle.writeString(JSON.stringify(filesystem.header))
+ const headerBuf = headerPickle.toBuffer()
+
+ const sizePickle = pickle.createEmpty()
+ sizePickle.writeUInt32(headerBuf.length)
+ const sizeBuf = sizePickle.toBuffer()
+
+ const out = fs.createWriteStream(dest)
+ await new Promise((resolve, reject) => {
+ out.on('error', reject)
+ out.write(sizeBuf)
+ return out.write(headerBuf, () => resolve())
+ })
+ return writeFileListToStream(dest, filesystem, out, files, metadata)
+}
+
+module.exports.readArchiveHeaderSync = function (archive) {
+ const fd = fs.openSync(archive, 'r')
+ let size
+ let headerBuf
+ try {
+ const sizeBuf = Buffer.alloc(8)
+ if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
+ throw new Error('Unable to read header size')
+ }
+
+ const sizePickle = pickle.createFromBuffer(sizeBuf)
+ size = sizePickle.createIterator().readUInt32()
+ headerBuf = Buffer.alloc(size)
+ if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
+ throw new Error('Unable to read header')
+ }
+ } finally {
+ fs.closeSync(fd)
+ }
+
+ const headerPickle = pickle.createFromBuffer(headerBuf)
+ const header = headerPickle.createIterator().readString()
+ return { headerString: header, header: JSON.parse(header), headerSize: size }
+}
+
+module.exports.readFilesystemSync = function (archive) {
+ if (!filesystemCache[archive]) {
+ const header = this.readArchiveHeaderSync(archive)
+ const filesystem = new Filesystem(archive)
+ filesystem.header = header.header
+ filesystem.headerSize = header.headerSize
+ filesystemCache[archive] = filesystem
+ }
+ return filesystemCache[archive]
+}
+
+module.exports.uncacheFilesystem = function (archive) {
+ if (filesystemCache[archive]) {
+ filesystemCache[archive] = undefined
+ return true
+ }
+ return false
+}
+
+module.exports.uncacheAll = function () {
+ filesystemCache = {}
+}
+
+module.exports.readFileSync = function (filesystem, filename, info) {
+  let buffer = Buffer.alloc(info.size)
+  // Node throws an exception when reading 0 bytes into a 0-size buffer,
+  // so we short-circuit the read in this case.
+  if (info.size <= 0) { return buffer }
+  if (info.unpacked) {
+    // it's an unpacked file, copy it.
+    buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
+  } else {
+    const fd = fs.openSync(filesystem.src, 'r')
+ try {
+ const offset = 8 + filesystem.headerSize + parseInt(info.offset)
+ fs.readSync(fd, buffer, 0, info.size, offset)
+ } finally {
+ fs.closeSync(fd)
+ }
+ }
+ return buffer
+}
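
The readers above assume the asar layout: an 8-byte size pickle, then a JSON header pickle, then the raw file contents back to back. A short sketch against a hypothetical 'app.asar':

const disk = require('@electron/asar/lib/disk')

const { header, headerSize } = disk.readArchiveHeaderSync('app.asar')
console.log(headerSize)                 // byte length of the header pickle payload
console.log(Object.keys(header.files))  // top-level entries in the archive

// File data begins at 8 + headerSize; each file's `offset` in the header is
// relative to that point, exactly as readFileSync computes above.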
diff --git a/desktop/node_modules/@electron/asar/lib/filesystem.js b/desktop/node_modules/@electron/asar/lib/filesystem.js
new file mode 100644
index 0000000..ce10245
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/filesystem.js
@@ -0,0 +1,154 @@
+'use strict'
+
+const fs = require('./wrapped-fs')
+const os = require('os')
+const path = require('path')
+const { promisify } = require('util')
+const stream = require('stream')
+const getFileIntegrity = require('./integrity')
+
+const UINT32_MAX = 2 ** 32 - 1
+
+const pipeline = promisify(stream.pipeline)
+
+class Filesystem {
+ constructor (src) {
+ this.src = path.resolve(src)
+ this.header = { files: Object.create(null) }
+ this.offset = BigInt(0)
+ }
+
+ searchNodeFromDirectory (p) {
+ let json = this.header
+ const dirs = p.split(path.sep)
+ for (const dir of dirs) {
+ if (dir !== '.') {
+ if (!json.files[dir]) {
+ json.files[dir] = { files: Object.create(null) }
+ }
+ json = json.files[dir]
+ }
+ }
+ return json
+ }
+
+ searchNodeFromPath (p) {
+ p = path.relative(this.src, p)
+ if (!p) { return this.header }
+ const name = path.basename(p)
+ const node = this.searchNodeFromDirectory(path.dirname(p))
+ if (node.files == null) {
+ node.files = Object.create(null)
+ }
+ if (node.files[name] == null) {
+ node.files[name] = Object.create(null)
+ }
+ return node.files[name]
+ }
+
+ insertDirectory (p, shouldUnpack) {
+ const node = this.searchNodeFromPath(p)
+ if (shouldUnpack) {
+ node.unpacked = shouldUnpack
+ }
+ node.files = node.files || Object.create(null)
+ return node.files
+ }
+
+ async insertFile (p, shouldUnpack, file, options) {
+ const dirNode = this.searchNodeFromPath(path.dirname(p))
+ const node = this.searchNodeFromPath(p)
+ if (shouldUnpack || dirNode.unpacked) {
+ node.size = file.stat.size
+ node.unpacked = true
+ node.integrity = await getFileIntegrity(p)
+ return Promise.resolve()
+ }
+
+ let size
+
+ const transformed = options.transform && options.transform(p)
+ if (transformed) {
+ const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
+ const tmpfile = path.join(tmpdir, path.basename(p))
+ const out = fs.createWriteStream(tmpfile)
+ const readStream = fs.createReadStream(p)
+
+ await pipeline(readStream, transformed, out)
+ file.transformed = {
+ path: tmpfile,
+ stat: await fs.lstat(tmpfile)
+ }
+ size = file.transformed.stat.size
+ } else {
+ size = file.stat.size
+ }
+
+  // The asar format caps each file at UINT32_MAX bytes (~4.2 GB).
+  if (size > UINT32_MAX) {
+    throw new Error(`${p}: file size cannot be larger than 4.2GB`)
+ }
+
+ node.size = size
+ node.offset = this.offset.toString()
+ node.integrity = await getFileIntegrity(p)
+ if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
+ node.executable = true
+ }
+ this.offset += BigInt(size)
+ }
+
+ insertLink (p) {
+ const link = path.relative(fs.realpathSync(this.src), fs.realpathSync(p))
+ if (link.substr(0, 2) === '..') {
+ throw new Error(`${p}: file "${link}" links out of the package`)
+ }
+ const node = this.searchNodeFromPath(p)
+ node.link = link
+ return link
+ }
+
+ listFiles (options) {
+ const files = []
+
+ const fillFilesFromMetadata = function (basePath, metadata) {
+ if (!metadata.files) {
+ return
+ }
+
+ for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
+ const fullPath = path.join(basePath, childPath)
+ const packState = childMetadata.unpacked ? 'unpack' : 'pack '
+ files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath)
+ fillFilesFromMetadata(fullPath, childMetadata)
+ }
+ }
+
+ fillFilesFromMetadata('/', this.header)
+ return files
+ }
+
+ getNode (p) {
+ const node = this.searchNodeFromDirectory(path.dirname(p))
+ const name = path.basename(p)
+ if (name) {
+ return node.files[name]
+ } else {
+ return node
+ }
+ }
+
+ getFile (p, followLinks) {
+ followLinks = typeof followLinks === 'undefined' ? true : followLinks
+ const info = this.getNode(p)
+
+ // if followLinks is false we don't resolve symlinks
+ if (info.link && followLinks) {
+ return this.getFile(info.link)
+ } else {
+ return info
+ }
+ }
+}
+
+module.exports = Filesystem
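
The header this class accumulates is plain JSON, matching the DirectoryRecord / FileRecord types declared in index.d.ts below. An illustrative (hand-written, not generated) sketch of the shape produced by insertDirectory, insertFile, and insertLink:

// Offsets are strings because they are BigInts serialized with toString();
// sizes are plain numbers, each below UINT32_MAX.
const exampleHeader = {
  files: {
    'index.js': { offset: '0', size: 1024, integrity: { /* see integrity.js */ } },
    lib: {
      files: {
        'helper.js': { offset: '1024', size: 512, executable: true, integrity: { /* ... */ } }
      }
    },
    'link.js': { link: 'index.js' },       // symlink target, relative to src
    native: { unpacked: true, files: {} }  // contents live in app.asar.unpacked
  }
}
console.log(JSON.stringify(exampleHeader.files['index.js']))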
diff --git a/desktop/node_modules/@electron/asar/lib/index.d.ts b/desktop/node_modules/@electron/asar/lib/index.d.ts
new file mode 100644
index 0000000..b79528b
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/index.d.ts
@@ -0,0 +1,250 @@
+import { Stats } from "fs";
+
+interface IMinimatchOptions {
+ /**
+ * Dump a ton of stuff to stderr.
+ *
+ * @default false
+ */
+ debug?: boolean | undefined;
+
+ /**
+ * Do not expand `{a,b}` and `{1..3}` brace sets.
+ *
+ * @default false
+ */
+ nobrace?: boolean | undefined;
+
+ /**
+ * Disable `**` matching against multiple folder names.
+ *
+ * @default false
+ */
+ noglobstar?: boolean | undefined;
+
+ /**
+ * Allow patterns to match filenames starting with a period,
+ * even if the pattern does not explicitly have a period in that spot.
+ *
+ * Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set.
+ *
+ * @default false
+ */
+ dot?: boolean | undefined;
+
+ /**
+ * Disable "extglob" style patterns like `+(a|b)`.
+ *
+ * @default false
+ */
+ noext?: boolean | undefined;
+
+ /**
+ * Perform a case-insensitive match.
+ *
+ * @default false
+ */
+ nocase?: boolean | undefined;
+
+ /**
+ * When a match is not found by `minimatch.match`,
+ * return a list containing the pattern itself if this option is set.
+ * Otherwise, an empty list is returned if there are no matches.
+ *
+ * @default false
+ */
+ nonull?: boolean | undefined;
+
+ /**
+ * If set, then patterns without slashes will be matched
+ * against the basename of the path if it contains slashes. For example,
+ * `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
+ *
+ * @default false
+ */
+ matchBase?: boolean | undefined;
+
+ /**
+ * Suppress the behavior of treating `#` at the start of a pattern as a comment.
+ *
+ * @default false
+ */
+ nocomment?: boolean | undefined;
+
+ /**
+ * Suppress the behavior of treating a leading `!` character as negation.
+ *
+ * @default false
+ */
+ nonegate?: boolean | undefined;
+
+ /**
+ * Returns from negate expressions the same as if they were not negated.
+   * (I.e., true on a hit, false on a miss.)
+ *
+ * @default false
+ */
+ flipNegate?: boolean | undefined;
+
+ /**
+ * Compare a partial path to a pattern. As long as the parts of the path that
+ * are present are not contradicted by the pattern, it will be treated as a
+ * match. This is useful in applications where you're walking through a
+ * folder structure, and don't yet have the full path, but want to ensure that
+ * you do not walk down paths that can never be a match.
+ *
+ * @default false
+ *
+ * @example
+ * import minimatch = require("minimatch");
+ *
+ * minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d
+ * minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d
+ * minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a
+ */
+ partial?: boolean;
+
+ /**
+ * Use `\\` as a path separator _only_, and _never_ as an escape
+ * character. If set, all `\\` characters are replaced with `/` in
+ * the pattern. Note that this makes it **impossible** to match
+ * against paths containing literal glob pattern characters, but
+ * allows matching with patterns constructed using `path.join()` and
+ * `path.resolve()` on Windows platforms, mimicking the (buggy!)
+ * behavior of earlier versions on Windows. Please use with
+   * caution, and be mindful of the caveat about Windows paths.
+ *
+ * For legacy reasons, this is also set if
+ * `options.allowWindowsEscape` is set to the exact value `false`.
+ *
+ * @default false
+ */
+ windowsPathsNoEscape?: boolean;
+}
+
+import fs = require("fs");
+interface IGlobOptions extends IMinimatchOptions {
+ cwd?: string | undefined;
+ root?: string | undefined;
+ dot?: boolean | undefined;
+ nomount?: boolean | undefined;
+ mark?: boolean | undefined;
+ nosort?: boolean | undefined;
+ stat?: boolean | undefined;
+ silent?: boolean | undefined;
+ strict?: boolean | undefined;
+ cache?:
+ | { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray<string> }
+ | undefined;
+ statCache?:
+ | { [path: string]: false | { isDirectory(): boolean } | undefined }
+ | undefined;
+ symlinks?: { [path: string]: boolean | undefined } | undefined;
+ realpathCache?: { [path: string]: string } | undefined;
+ sync?: boolean | undefined;
+ nounique?: boolean | undefined;
+ nonull?: boolean | undefined;
+ debug?: boolean | undefined;
+ nobrace?: boolean | undefined;
+ noglobstar?: boolean | undefined;
+ noext?: boolean | undefined;
+ nocase?: boolean | undefined;
+ matchBase?: any;
+ nodir?: boolean | undefined;
+ ignore?: string | ReadonlyArray<string> | undefined;
+ follow?: boolean | undefined;
+ realpath?: boolean | undefined;
+ nonegate?: boolean | undefined;
+ nocomment?: boolean | undefined;
+ absolute?: boolean | undefined;
+ allowWindowsEscape?: boolean | undefined;
+ fs?: typeof fs;
+}
+
+export type CreateOptions = {
+ dot?: boolean;
+ globOptions?: IGlobOptions;
+ ordering?: string;
+ pattern?: string;
+ transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
+ unpack?: string;
+ unpackDir?: string;
+};
+
+export type ListOptions = {
+ isPack: boolean;
+};
+
+export type EntryMetadata = {
+ unpacked: boolean;
+};
+
+export type DirectoryMetadata = EntryMetadata & {
+ files: { [property: string]: EntryMetadata };
+};
+
+export type FileMetadata = EntryMetadata & {
+ executable?: true;
+ offset?: number;
+ size?: number;
+};
+
+export type LinkMetadata = {
+ link: string;
+};
+
+export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
+
+export type InputMetadataType = 'directory' | 'file' | 'link';
+
+export type InputMetadata = {
+ [property: string]: {
+ type: InputMetadataType;
+ stat: Stats;
+ }
+};
+
+export type DirectoryRecord = {
+ files: Record<string, DirectoryRecord | FileRecord>;
+};
+
+export type FileRecord = {
+ offset: string;
+ size: number;
+ executable?: boolean;
+ integrity: {
+ hash: string;
+ algorithm: 'SHA256';
+ blocks: string[];
+ blockSize: number;
+ };
+}
+
+export type ArchiveHeader = {
+ // The JSON parsed header string
+ header: DirectoryRecord;
+ headerString: string;
+ headerSize: number;
+}
+
+export function createPackage(src: string, dest: string): Promise<void>;
+export function createPackageWithOptions(
+ src: string,
+ dest: string,
+ options: CreateOptions
+): Promise<void>;
+export function createPackageFromFiles(
+ src: string,
+ dest: string,
+ filenames: string[],
+ metadata?: InputMetadata,
+ options?: CreateOptions
+): Promise<void>;
+
+export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
+export function getRawHeader(archive: string): ArchiveHeader;
+export function listPackage(archive: string, options?: ListOptions): string[];
+export function extractFile(archive: string, filename: string): Buffer;
+export function extractAll(archive: string, dest: string): void;
+export function uncache(archive: string): boolean;
+export function uncacheAll(): void;
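
A consumer-side sketch of the typed API (plain JavaScript; the declared return types are noted in comments, and the paths are hypothetical):

const asar = require('@electron/asar')

// statFile returns Metadata: DirectoryMetadata, FileMetadata, or LinkMetadata.
const meta = asar.statFile('app.asar', 'main.js')
if (meta.size !== undefined) console.log('file of', meta.size, 'bytes')

// getRawHeader returns ArchiveHeader: the parsed DirectoryRecord together with
// the original header string and its size.
const { header, headerString, headerSize } = asar.getRawHeader('app.asar')
console.log(headerSize, headerString.length, Object.keys(header.files))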
diff --git a/desktop/node_modules/@electron/asar/lib/integrity.js b/desktop/node_modules/@electron/asar/lib/integrity.js
new file mode 100644
index 0000000..6fabee4
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/integrity.js
@@ -0,0 +1,62 @@
+const crypto = require('crypto')
+const fs = require('fs')
+const stream = require('stream')
+const { promisify } = require('util')
+
+const ALGORITHM = 'SHA256'
+// 4MB default block size
+const BLOCK_SIZE = 4 * 1024 * 1024
+
+const pipeline = promisify(stream.pipeline)
+
+function hashBlock (block) {
+ return crypto.createHash(ALGORITHM).update(block).digest('hex')
+}
+
+async function getFileIntegrity (path) {
+ const fileHash = crypto.createHash(ALGORITHM)
+
+ const blocks = []
+ let currentBlockSize = 0
+ let currentBlock = []
+
+ await pipeline(
+ fs.createReadStream(path),
+ new stream.PassThrough({
+ decodeStrings: false,
+ transform (_chunk, encoding, callback) {
+ fileHash.update(_chunk)
+
+ function handleChunk (chunk) {
+ const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
+ currentBlockSize += diffToSlice
+ currentBlock.push(chunk.slice(0, diffToSlice))
+ if (currentBlockSize === BLOCK_SIZE) {
+ blocks.push(hashBlock(Buffer.concat(currentBlock)))
+ currentBlock = []
+ currentBlockSize = 0
+ }
+ if (diffToSlice < chunk.byteLength) {
+ handleChunk(chunk.slice(diffToSlice))
+ }
+ }
+ handleChunk(_chunk)
+ callback()
+ },
+ flush (callback) {
+ blocks.push(hashBlock(Buffer.concat(currentBlock)))
+ currentBlock = []
+ callback()
+ }
+ })
+ )
+
+ return {
+ algorithm: ALGORITHM,
+ hash: fileHash.digest('hex'),
+ blockSize: BLOCK_SIZE,
+ blocks: blocks
+ }
+}
+
+module.exports = getFileIntegrity
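
A sketch of the resolved value, for a hypothetical input file (digests abbreviated):

const getFileIntegrity = require('@electron/asar/lib/integrity')

getFileIntegrity('app/main.js').then((integrity) => {
  // {
  //   algorithm: 'SHA256',
  //   hash: '9f86d0...',        // SHA-256 of the whole file, hex-encoded
  //   blockSize: 4194304,       // 4 MB
  //   blocks: ['9f86d0...']     // one hex digest per 4 MB block
  // }
  console.log(integrity.blocks.length, 'block(s)')
})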
diff --git a/desktop/node_modules/@electron/asar/lib/pickle.js b/desktop/node_modules/@electron/asar/lib/pickle.js
new file mode 100644
index 0000000..6943655
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/pickle.js
@@ -0,0 +1,230 @@
+// sizeof(T).
+const SIZE_INT32 = 4
+const SIZE_UINT32 = 4
+const SIZE_INT64 = 8
+const SIZE_UINT64 = 8
+const SIZE_FLOAT = 4
+const SIZE_DOUBLE = 8
+
+// The allocation granularity of the payload.
+const PAYLOAD_UNIT = 64
+
+// 2 ** 53, the largest integer JavaScript can represent exactly.
+const CAPACITY_READ_ONLY = 9007199254740992
+
+// Aligns 'i' by rounding it up to the next multiple of 'alignment'.
+const alignInt = function (i, alignment) {
+ return i + (alignment - (i % alignment)) % alignment
+}
+
+// PickleIterator reads data from a Pickle. The Pickle object must remain valid
+// while the PickleIterator object is in use.
+const PickleIterator = (function () {
+ function PickleIterator (pickle) {
+ this.payload = pickle.header
+ this.payloadOffset = pickle.headerSize
+ this.readIndex = 0
+ this.endIndex = pickle.getPayloadSize()
+ }
+
+ PickleIterator.prototype.readBool = function () {
+ return this.readInt() !== 0
+ }
+
+ PickleIterator.prototype.readInt = function () {
+ return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE)
+ }
+
+ PickleIterator.prototype.readUInt32 = function () {
+ return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE)
+ }
+
+  PickleIterator.prototype.readInt64 = function () {
+    return this.readBytes(SIZE_INT64, Buffer.prototype.readBigInt64LE) // BigInt
+  }
+
+  PickleIterator.prototype.readUInt64 = function () {
+    return this.readBytes(SIZE_UINT64, Buffer.prototype.readBigUInt64LE) // BigInt
+  }
+
+ PickleIterator.prototype.readFloat = function () {
+ return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE)
+ }
+
+ PickleIterator.prototype.readDouble = function () {
+ return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE)
+ }
+
+ PickleIterator.prototype.readString = function () {
+ return this.readBytes(this.readInt()).toString()
+ }
+
+ PickleIterator.prototype.readBytes = function (length, method) {
+ const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length)
+ if (method != null) {
+ return method.call(this.payload, readPayloadOffset, length)
+ } else {
+ return this.payload.slice(readPayloadOffset, readPayloadOffset + length)
+ }
+ }
+
+ PickleIterator.prototype.getReadPayloadOffsetAndAdvance = function (length) {
+ if (length > this.endIndex - this.readIndex) {
+ this.readIndex = this.endIndex
+ throw new Error('Failed to read data with length of ' + length)
+ }
+ const readPayloadOffset = this.payloadOffset + this.readIndex
+ this.advance(length)
+ return readPayloadOffset
+ }
+
+ PickleIterator.prototype.advance = function (size) {
+ const alignedSize = alignInt(size, SIZE_UINT32)
+ if (this.endIndex - this.readIndex < alignedSize) {
+ this.readIndex = this.endIndex
+ } else {
+ this.readIndex += alignedSize
+ }
+ }
+
+ return PickleIterator
+})()
+
+// This class provides facilities for basic binary value packing and unpacking.
+//
+// The Pickle class supports appending primitive values (ints, strings, etc.)
+// to a pickle instance. The Pickle instance grows its internal memory buffer
+// dynamically to hold the sequence of primitive values. The internal memory
+// buffer is exposed as the "data" of the Pickle. This "data" can be passed
+// to a Pickle object to initialize it for reading.
+//
+// When reading from a Pickle object, it is important for the consumer to know
+// what value types to read and in what order to read them as the Pickle does
+// not keep track of the type of data written to it.
+//
+// The Pickle's data has a header which contains the size of the Pickle's
+// payload. It can optionally support additional space in the header. That
+// space is controlled by the header_size parameter passed to the Pickle
+// constructor.
+const Pickle = (function () {
+ function Pickle (buffer) {
+ if (buffer) {
+ this.initFromBuffer(buffer)
+ } else {
+ this.initEmpty()
+ }
+ }
+
+ Pickle.prototype.initEmpty = function () {
+ this.header = Buffer.alloc(0)
+ this.headerSize = SIZE_UINT32
+ this.capacityAfterHeader = 0
+ this.writeOffset = 0
+ this.resize(PAYLOAD_UNIT)
+ this.setPayloadSize(0)
+ }
+
+ Pickle.prototype.initFromBuffer = function (buffer) {
+ this.header = buffer
+ this.headerSize = buffer.length - this.getPayloadSize()
+ this.capacityAfterHeader = CAPACITY_READ_ONLY
+ this.writeOffset = 0
+ if (this.headerSize > buffer.length) {
+ this.headerSize = 0
+ }
+ if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
+ this.headerSize = 0
+ }
+ if (this.headerSize === 0) {
+ this.header = Buffer.alloc(0)
+ }
+ }
+
+ Pickle.prototype.createIterator = function () {
+ return new PickleIterator(this)
+ }
+
+ Pickle.prototype.toBuffer = function () {
+ return this.header.slice(0, this.headerSize + this.getPayloadSize())
+ }
+
+ Pickle.prototype.writeBool = function (value) {
+ return this.writeInt(value ? 1 : 0)
+ }
+
+ Pickle.prototype.writeInt = function (value) {
+ return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE)
+ }
+
+ Pickle.prototype.writeUInt32 = function (value) {
+ return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE)
+ }
+
+  Pickle.prototype.writeInt64 = function (value) {
+    return this.writeBytes(BigInt(value), SIZE_INT64, Buffer.prototype.writeBigInt64LE)
+  }
+
+  Pickle.prototype.writeUInt64 = function (value) {
+    return this.writeBytes(BigInt(value), SIZE_UINT64, Buffer.prototype.writeBigUInt64LE)
+  }
+
+ Pickle.prototype.writeFloat = function (value) {
+ return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE)
+ }
+
+ Pickle.prototype.writeDouble = function (value) {
+ return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE)
+ }
+
+ Pickle.prototype.writeString = function (value) {
+ const length = Buffer.byteLength(value, 'utf8')
+ if (!this.writeInt(length)) {
+ return false
+ }
+ return this.writeBytes(value, length)
+ }
+
+ Pickle.prototype.setPayloadSize = function (payloadSize) {
+ return this.header.writeUInt32LE(payloadSize, 0)
+ }
+
+ Pickle.prototype.getPayloadSize = function () {
+ return this.header.readUInt32LE(0)
+ }
+
+ Pickle.prototype.writeBytes = function (data, length, method) {
+ const dataLength = alignInt(length, SIZE_UINT32)
+ const newSize = this.writeOffset + dataLength
+ if (newSize > this.capacityAfterHeader) {
+ this.resize(Math.max(this.capacityAfterHeader * 2, newSize))
+ }
+ if (method != null) {
+ method.call(this.header, data, this.headerSize + this.writeOffset)
+ } else {
+ this.header.write(data, this.headerSize + this.writeOffset, length)
+ }
+ const endOffset = this.headerSize + this.writeOffset + length
+ this.header.fill(0, endOffset, endOffset + dataLength - length)
+ this.setPayloadSize(newSize)
+ this.writeOffset = newSize
+ return true
+ }
+
+ Pickle.prototype.resize = function (newCapacity) {
+ newCapacity = alignInt(newCapacity, PAYLOAD_UNIT)
+ this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)])
+ this.capacityAfterHeader = newCapacity
+ }
+
+ return Pickle
+})()
+
+module.exports = {
+ createEmpty: function () {
+ return new Pickle()
+ },
+
+ createFromBuffer: function (buffer) {
+ return new Pickle(buffer)
+ }
+}
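
Note that the 64-bit accessors above use Node's BigInt Buffer methods (readBigInt64LE and friends), since Buffer has no plain readInt64LE. A round-trip sketch of the format — this is exactly how disk.js frames the archive header:

const pickle = require('@electron/asar/lib/pickle')

// Writing: a pickle is a uint32 payload size followed by the payload itself,
// with every value padded to 4-byte alignment.
const out = pickle.createEmpty()
out.writeString('{"files":{}}')
const buf = out.toBuffer()

// Reading: values must be consumed in the order they were written.
const it = pickle.createFromBuffer(buf).createIterator()
console.log(it.readString()) // '{"files":{}}'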
diff --git a/desktop/node_modules/@electron/asar/lib/wrapped-fs.js b/desktop/node_modules/@electron/asar/lib/wrapped-fs.js
new file mode 100644
index 0000000..24f59d0
--- /dev/null
+++ b/desktop/node_modules/@electron/asar/lib/wrapped-fs.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const fs = process.versions.electron ? require('original-fs') : require('fs')
+
+const promisifiedMethods = [
+ 'lstat',
+ 'mkdtemp',
+ 'readFile',
+ 'stat',
+ 'writeFile'
+]
+
+const promisified = {}
+
+for (const method of Object.keys(fs)) {
+ if (promisifiedMethods.includes(method)) {
+ promisified[method] = fs.promises[method]
+ } else {
+ promisified[method] = fs[method]
+ }
+}
+// To make it more like fs-extra
+promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
+promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
+
+module.exports = promisified
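
The result is a drop-in fs whose listed methods return promises, e.g.:

const fs = require('@electron/asar/lib/wrapped-fs')

async function main () {
  await fs.mkdirp('tmp/nested')                   // the mkdir -p style shim above
  await fs.writeFile('tmp/nested/a.txt', 'hello') // promisified
  const stat = await fs.stat('tmp/nested/a.txt')  // promisified
  console.log(stat.size)                          // 5
  // Methods outside the list keep their original forms, e.g. fs.readFileSync.
}

main().catch(console.error)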