aboutsummaryrefslogtreecommitdiff
path: root/node_modules/got/source
diff options
context:
space:
mode:
authorMinteck <contact@minteck.org>2022-06-04 08:51:01 +0200
committerMinteck <contact@minteck.org>2022-06-04 08:51:01 +0200
commit383285ecd5292bf9a825e05904955b937de84cc9 (patch)
tree0a53b6f02c1604b078044567c03dc1b6c944c8c2 /node_modules/got/source
downloadequestriadb-383285ecd5292bf9a825e05904955b937de84cc9.tar.gz
equestriadb-383285ecd5292bf9a825e05904955b937de84cc9.tar.bz2
equestriadb-383285ecd5292bf9a825e05904955b937de84cc9.zip
Initial commit
Diffstat (limited to 'node_modules/got/source')
-rw-r--r--node_modules/got/source/as-promise.js108
-rw-r--r--node_modules/got/source/as-stream.js93
-rw-r--r--node_modules/got/source/create.js79
-rw-r--r--node_modules/got/source/errors.js107
-rw-r--r--node_modules/got/source/get-response.js31
-rw-r--r--node_modules/got/source/index.js60
-rw-r--r--node_modules/got/source/known-hook-events.js10
-rw-r--r--node_modules/got/source/merge.js73
-rw-r--r--node_modules/got/source/normalize-arguments.js265
-rw-r--r--node_modules/got/source/progress.js96
-rw-r--r--node_modules/got/source/request-as-event-emitter.js312
-rw-r--r--node_modules/got/source/utils/deep-freeze.js12
-rw-r--r--node_modules/got/source/utils/get-body-size.js32
-rw-r--r--node_modules/got/source/utils/is-form-data.js4
-rw-r--r--node_modules/got/source/utils/timed-out.js160
-rw-r--r--node_modules/got/source/utils/url-to-options.js25
16 files changed, 1467 insertions, 0 deletions
diff --git a/node_modules/got/source/as-promise.js b/node_modules/got/source/as-promise.js
new file mode 100644
index 0000000..c502325
--- /dev/null
+++ b/node_modules/got/source/as-promise.js
@@ -0,0 +1,108 @@
+'use strict';
+const EventEmitter = require('events');
+const getStream = require('get-stream');
+const is = require('@sindresorhus/is');
+const PCancelable = require('p-cancelable');
+const requestAsEventEmitter = require('./request-as-event-emitter');
+const {HTTPError, ParseError, ReadError} = require('./errors');
+const {options: mergeOptions} = require('./merge');
+const {reNormalize} = require('./normalize-arguments');
+
+const asPromise = options => {
+ const proxy = new EventEmitter();
+
+ const promise = new PCancelable((resolve, reject, onCancel) => {
+ const emitter = requestAsEventEmitter(options);
+
+ onCancel(emitter.abort);
+
+ emitter.on('response', async response => {
+ proxy.emit('response', response);
+
+ const stream = is.null(options.encoding) ? getStream.buffer(response) : getStream(response, options);
+
+ let data;
+ try {
+ data = await stream;
+ } catch (error) {
+ reject(new ReadError(error, options));
+ return;
+ }
+
+ const limitStatusCode = options.followRedirect ? 299 : 399;
+
+ response.body = data;
+
+ try {
+ for (const [index, hook] of Object.entries(options.hooks.afterResponse)) {
+ // eslint-disable-next-line no-await-in-loop
+ response = await hook(response, updatedOptions => {
+ updatedOptions = reNormalize(mergeOptions(options, {
+ ...updatedOptions,
+ retry: 0,
+ throwHttpErrors: false
+ }));
+
+ // Remove any further hooks for that request, because we we'll call them anyway.
+ // The loop continues. We don't want duplicates (asPromise recursion).
+ updatedOptions.hooks.afterResponse = options.hooks.afterResponse.slice(0, index);
+
+ return asPromise(updatedOptions);
+ });
+ }
+ } catch (error) {
+ reject(error);
+ return;
+ }
+
+ const {statusCode} = response;
+
+ if (options.json && response.body) {
+ try {
+ response.body = JSON.parse(response.body);
+ } catch (error) {
+ if (statusCode >= 200 && statusCode < 300) {
+ const parseError = new ParseError(error, statusCode, options, data);
+ Object.defineProperty(parseError, 'response', {value: response});
+ reject(parseError);
+ return;
+ }
+ }
+ }
+
+ if (statusCode !== 304 && (statusCode < 200 || statusCode > limitStatusCode)) {
+ const error = new HTTPError(response, options);
+ Object.defineProperty(error, 'response', {value: response});
+ if (emitter.retry(error) === false) {
+ if (options.throwHttpErrors) {
+ reject(error);
+ return;
+ }
+
+ resolve(response);
+ }
+
+ return;
+ }
+
+ resolve(response);
+ });
+
+ emitter.once('error', reject);
+ [
+ 'request',
+ 'redirect',
+ 'uploadProgress',
+ 'downloadProgress'
+ ].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));
+ });
+
+ promise.on = (name, fn) => {
+ proxy.on(name, fn);
+ return promise;
+ };
+
+ return promise;
+};
+
+module.exports = asPromise;
diff --git a/node_modules/got/source/as-stream.js b/node_modules/got/source/as-stream.js
new file mode 100644
index 0000000..98c5342
--- /dev/null
+++ b/node_modules/got/source/as-stream.js
@@ -0,0 +1,93 @@
+'use strict';
+const {PassThrough} = require('stream');
+const duplexer3 = require('duplexer3');
+const requestAsEventEmitter = require('./request-as-event-emitter');
+const {HTTPError, ReadError} = require('./errors');
+
+module.exports = options => {
+ const input = new PassThrough();
+ const output = new PassThrough();
+ const proxy = duplexer3(input, output);
+ const piped = new Set();
+ let isFinished = false;
+
+ options.retry.retries = () => 0;
+
+ if (options.body) {
+ proxy.write = () => {
+ throw new Error('Got\'s stream is not writable when the `body` option is used');
+ };
+ }
+
+ const emitter = requestAsEventEmitter(options, input);
+
+ // Cancels the request
+ proxy._destroy = emitter.abort;
+
+ emitter.on('response', response => {
+ const {statusCode} = response;
+
+ response.on('error', error => {
+ proxy.emit('error', new ReadError(error, options));
+ });
+
+ if (options.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > 299)) {
+ proxy.emit('error', new HTTPError(response, options), null, response);
+ return;
+ }
+
+ isFinished = true;
+
+ response.pipe(output);
+
+ for (const destination of piped) {
+ if (destination.headersSent) {
+ continue;
+ }
+
+ for (const [key, value] of Object.entries(response.headers)) {
+ // Got gives *decompressed* data. Overriding `content-encoding` header would result in an error.
+ // It's not possible to decompress already decompressed data, is it?
+ const allowed = options.decompress ? key !== 'content-encoding' : true;
+ if (allowed) {
+ destination.setHeader(key, value);
+ }
+ }
+
+ destination.statusCode = response.statusCode;
+ }
+
+ proxy.emit('response', response);
+ });
+
+ [
+ 'error',
+ 'request',
+ 'redirect',
+ 'uploadProgress',
+ 'downloadProgress'
+ ].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));
+
+ const pipe = proxy.pipe.bind(proxy);
+ const unpipe = proxy.unpipe.bind(proxy);
+ proxy.pipe = (destination, options) => {
+ if (isFinished) {
+ throw new Error('Failed to pipe. The response has been emitted already.');
+ }
+
+ const result = pipe(destination, options);
+
+ if (Reflect.has(destination, 'setHeader')) {
+ piped.add(destination);
+ }
+
+ return result;
+ };
+
+ proxy.unpipe = stream => {
+ piped.delete(stream);
+ return unpipe(stream);
+ };
+
+ return proxy;
+};
diff --git a/node_modules/got/source/create.js b/node_modules/got/source/create.js
new file mode 100644
index 0000000..b78c51f
--- /dev/null
+++ b/node_modules/got/source/create.js
@@ -0,0 +1,79 @@
+'use strict';
+const errors = require('./errors');
+const asStream = require('./as-stream');
+const asPromise = require('./as-promise');
+const normalizeArguments = require('./normalize-arguments');
+const merge = require('./merge');
+const deepFreeze = require('./utils/deep-freeze');
+
+const getPromiseOrStream = options => options.stream ? asStream(options) : asPromise(options);
+
+const aliases = [
+ 'get',
+ 'post',
+ 'put',
+ 'patch',
+ 'head',
+ 'delete'
+];
+
+const create = defaults => {
+ defaults = merge({}, defaults);
+ normalizeArguments.preNormalize(defaults.options);
+
+ if (!defaults.handler) {
+ // This can't be getPromiseOrStream, because when merging
+ // the chain would stop at this point and no further handlers would be called.
+ defaults.handler = (options, next) => next(options);
+ }
+
+ function got(url, options) {
+ try {
+ return defaults.handler(normalizeArguments(url, options, defaults), getPromiseOrStream);
+ } catch (error) {
+ if (options && options.stream) {
+ throw error;
+ } else {
+ return Promise.reject(error);
+ }
+ }
+ }
+
+ got.create = create;
+ got.extend = options => {
+ let mutableDefaults;
+ if (options && Reflect.has(options, 'mutableDefaults')) {
+ mutableDefaults = options.mutableDefaults;
+ delete options.mutableDefaults;
+ } else {
+ mutableDefaults = defaults.mutableDefaults;
+ }
+
+ return create({
+ options: merge.options(defaults.options, options),
+ handler: defaults.handler,
+ mutableDefaults
+ });
+ };
+
+ got.mergeInstances = (...args) => create(merge.instances(args));
+
+ got.stream = (url, options) => got(url, {...options, stream: true});
+
+ for (const method of aliases) {
+ got[method] = (url, options) => got(url, {...options, method});
+ got.stream[method] = (url, options) => got.stream(url, {...options, method});
+ }
+
+ Object.assign(got, {...errors, mergeOptions: merge.options});
+ Object.defineProperty(got, 'defaults', {
+ value: defaults.mutableDefaults ? defaults : deepFreeze(defaults),
+ writable: defaults.mutableDefaults,
+ configurable: defaults.mutableDefaults,
+ enumerable: true
+ });
+
+ return got;
+};
+
+module.exports = create;
diff --git a/node_modules/got/source/errors.js b/node_modules/got/source/errors.js
new file mode 100644
index 0000000..b6cbadc
--- /dev/null
+++ b/node_modules/got/source/errors.js
@@ -0,0 +1,107 @@
+'use strict';
+const urlLib = require('url');
+const http = require('http');
+const PCancelable = require('p-cancelable');
+const is = require('@sindresorhus/is');
+
+class GotError extends Error {
+ constructor(message, error, options) {
+ super(message);
+ Error.captureStackTrace(this, this.constructor);
+ this.name = 'GotError';
+
+ if (!is.undefined(error.code)) {
+ this.code = error.code;
+ }
+
+ Object.assign(this, {
+ host: options.host,
+ hostname: options.hostname,
+ method: options.method,
+ path: options.path,
+ socketPath: options.socketPath,
+ protocol: options.protocol,
+ url: options.href,
+ gotOptions: options
+ });
+ }
+}
+
+module.exports.GotError = GotError;
+
+module.exports.CacheError = class extends GotError {
+ constructor(error, options) {
+ super(error.message, error, options);
+ this.name = 'CacheError';
+ }
+};
+
+module.exports.RequestError = class extends GotError {
+ constructor(error, options) {
+ super(error.message, error, options);
+ this.name = 'RequestError';
+ }
+};
+
+module.exports.ReadError = class extends GotError {
+ constructor(error, options) {
+ super(error.message, error, options);
+ this.name = 'ReadError';
+ }
+};
+
+module.exports.ParseError = class extends GotError {
+ constructor(error, statusCode, options, data) {
+ super(`${error.message} in "${urlLib.format(options)}": \n${data.slice(0, 77)}...`, error, options);
+ this.name = 'ParseError';
+ this.statusCode = statusCode;
+ this.statusMessage = http.STATUS_CODES[this.statusCode];
+ }
+};
+
+module.exports.HTTPError = class extends GotError {
+ constructor(response, options) {
+ const {statusCode} = response;
+ let {statusMessage} = response;
+
+ if (statusMessage) {
+ statusMessage = statusMessage.replace(/\r?\n/g, ' ').trim();
+ } else {
+ statusMessage = http.STATUS_CODES[statusCode];
+ }
+
+ super(`Response code ${statusCode} (${statusMessage})`, {}, options);
+ this.name = 'HTTPError';
+ this.statusCode = statusCode;
+ this.statusMessage = statusMessage;
+ this.headers = response.headers;
+ this.body = response.body;
+ }
+};
+
+module.exports.MaxRedirectsError = class extends GotError {
+ constructor(statusCode, redirectUrls, options) {
+ super('Redirected 10 times. Aborting.', {}, options);
+ this.name = 'MaxRedirectsError';
+ this.statusCode = statusCode;
+ this.statusMessage = http.STATUS_CODES[this.statusCode];
+ this.redirectUrls = redirectUrls;
+ }
+};
+
+module.exports.UnsupportedProtocolError = class extends GotError {
+ constructor(options) {
+ super(`Unsupported protocol "${options.protocol}"`, {}, options);
+ this.name = 'UnsupportedProtocolError';
+ }
+};
+
+module.exports.TimeoutError = class extends GotError {
+ constructor(error, options) {
+ super(error.message, {code: 'ETIMEDOUT'}, options);
+ this.name = 'TimeoutError';
+ this.event = error.event;
+ }
+};
+
+module.exports.CancelError = PCancelable.CancelError;
diff --git a/node_modules/got/source/get-response.js b/node_modules/got/source/get-response.js
new file mode 100644
index 0000000..18453c2
--- /dev/null
+++ b/node_modules/got/source/get-response.js
@@ -0,0 +1,31 @@
+'use strict';
+const decompressResponse = require('decompress-response');
+const is = require('@sindresorhus/is');
+const mimicResponse = require('mimic-response');
+const progress = require('./progress');
+
+module.exports = (response, options, emitter) => {
+ const downloadBodySize = Number(response.headers['content-length']) || null;
+
+ const progressStream = progress.download(response, emitter, downloadBodySize);
+
+ mimicResponse(response, progressStream);
+
+ const newResponse = options.decompress === true &&
+ is.function(decompressResponse) &&
+ options.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
+
+ if (!options.decompress && ['gzip', 'deflate'].includes(response.headers['content-encoding'])) {
+ options.encoding = null;
+ }
+
+ emitter.emit('response', newResponse);
+
+ emitter.emit('downloadProgress', {
+ percent: 0,
+ transferred: 0,
+ total: downloadBodySize
+ });
+
+ response.pipe(progressStream);
+};
diff --git a/node_modules/got/source/index.js b/node_modules/got/source/index.js
new file mode 100644
index 0000000..cbf7c37
--- /dev/null
+++ b/node_modules/got/source/index.js
@@ -0,0 +1,60 @@
+'use strict';
+const pkg = require('../package.json');
+const create = require('./create');
+
+const defaults = {
+ options: {
+ retry: {
+ retries: 2,
+ methods: [
+ 'GET',
+ 'PUT',
+ 'HEAD',
+ 'DELETE',
+ 'OPTIONS',
+ 'TRACE'
+ ],
+ statusCodes: [
+ 408,
+ 413,
+ 429,
+ 500,
+ 502,
+ 503,
+ 504
+ ],
+ errorCodes: [
+ 'ETIMEDOUT',
+ 'ECONNRESET',
+ 'EADDRINUSE',
+ 'ECONNREFUSED',
+ 'EPIPE',
+ 'ENOTFOUND',
+ 'ENETUNREACH',
+ 'EAI_AGAIN'
+ ]
+ },
+ headers: {
+ 'user-agent': `${pkg.name}/${pkg.version} (https://github.com/sindresorhus/got)`
+ },
+ hooks: {
+ beforeRequest: [],
+ beforeRedirect: [],
+ beforeRetry: [],
+ afterResponse: []
+ },
+ decompress: true,
+ throwHttpErrors: true,
+ followRedirect: true,
+ stream: false,
+ form: false,
+ json: false,
+ cache: false,
+ useElectronNet: false
+ },
+ mutableDefaults: false
+};
+
+const got = create(defaults);
+
+module.exports = got;
diff --git a/node_modules/got/source/known-hook-events.js b/node_modules/got/source/known-hook-events.js
new file mode 100644
index 0000000..cd245e1
--- /dev/null
+++ b/node_modules/got/source/known-hook-events.js
@@ -0,0 +1,10 @@
+'use strict';
+
+module.exports = [
+ 'beforeError',
+ 'init',
+ 'beforeRequest',
+ 'beforeRedirect',
+ 'beforeRetry',
+ 'afterResponse'
+];
diff --git a/node_modules/got/source/merge.js b/node_modules/got/source/merge.js
new file mode 100644
index 0000000..900f09a
--- /dev/null
+++ b/node_modules/got/source/merge.js
@@ -0,0 +1,73 @@
+'use strict';
+const {URL} = require('url');
+const is = require('@sindresorhus/is');
+const knownHookEvents = require('./known-hook-events');
+
+const merge = (target, ...sources) => {
+ for (const source of sources) {
+ for (const [key, sourceValue] of Object.entries(source)) {
+ if (is.undefined(sourceValue)) {
+ continue;
+ }
+
+ const targetValue = target[key];
+ if (is.urlInstance(targetValue) && (is.urlInstance(sourceValue) || is.string(sourceValue))) {
+ target[key] = new URL(sourceValue, targetValue);
+ } else if (is.plainObject(sourceValue)) {
+ if (is.plainObject(targetValue)) {
+ target[key] = merge({}, targetValue, sourceValue);
+ } else {
+ target[key] = merge({}, sourceValue);
+ }
+ } else if (is.array(sourceValue)) {
+ target[key] = merge([], sourceValue);
+ } else {
+ target[key] = sourceValue;
+ }
+ }
+ }
+
+ return target;
+};
+
+const mergeOptions = (...sources) => {
+ sources = sources.map(source => source || {});
+ const merged = merge({}, ...sources);
+
+ const hooks = {};
+ for (const hook of knownHookEvents) {
+ hooks[hook] = [];
+ }
+
+ for (const source of sources) {
+ if (source.hooks) {
+ for (const hook of knownHookEvents) {
+ hooks[hook] = hooks[hook].concat(source.hooks[hook]);
+ }
+ }
+ }
+
+ merged.hooks = hooks;
+
+ return merged;
+};
+
+const mergeInstances = (instances, methods) => {
+ const handlers = instances.map(instance => instance.defaults.handler);
+ const size = instances.length - 1;
+
+ return {
+ methods,
+ options: mergeOptions(...instances.map(instance => instance.defaults.options)),
+ handler: (options, next) => {
+ let iteration = -1;
+ const iterate = options => handlers[++iteration](options, iteration === size ? next : iterate);
+
+ return iterate(options);
+ }
+ };
+};
+
+module.exports = merge;
+module.exports.options = mergeOptions;
+module.exports.instances = mergeInstances;
diff --git a/node_modules/got/source/normalize-arguments.js b/node_modules/got/source/normalize-arguments.js
new file mode 100644
index 0000000..665cbce
--- /dev/null
+++ b/node_modules/got/source/normalize-arguments.js
@@ -0,0 +1,265 @@
+'use strict';
+const {URL, URLSearchParams} = require('url'); // TODO: Use the `URL` global when targeting Node.js 10
+const urlLib = require('url');
+const is = require('@sindresorhus/is');
+const urlParseLax = require('url-parse-lax');
+const lowercaseKeys = require('lowercase-keys');
+const urlToOptions = require('./utils/url-to-options');
+const isFormData = require('./utils/is-form-data');
+const merge = require('./merge');
+const knownHookEvents = require('./known-hook-events');
+
+const retryAfterStatusCodes = new Set([413, 429, 503]);
+
+// `preNormalize` handles static options (e.g. headers).
+// For example, when you create a custom instance and make a request
+// with no static changes, they won't be normalized again.
+//
+// `normalize` operates on dynamic options - they cannot be saved.
+// For example, `body` is everytime different per request.
+// When it's done normalizing the new options, it performs merge()
+// on the prenormalized options and the normalized ones.
+
+const preNormalize = (options, defaults) => {
+ if (is.nullOrUndefined(options.headers)) {
+ options.headers = {};
+ } else {
+ options.headers = lowercaseKeys(options.headers);
+ }
+
+ if (options.baseUrl && !options.baseUrl.toString().endsWith('/')) {
+ options.baseUrl += '/';
+ }
+
+ if (options.stream) {
+ options.json = false;
+ }
+
+ if (is.nullOrUndefined(options.hooks)) {
+ options.hooks = {};
+ } else if (!is.object(options.hooks)) {
+ throw new TypeError(`Parameter \`hooks\` must be an object, not ${is(options.hooks)}`);
+ }
+
+ for (const event of knownHookEvents) {
+ if (is.nullOrUndefined(options.hooks[event])) {
+ if (defaults) {
+ options.hooks[event] = [...defaults.hooks[event]];
+ } else {
+ options.hooks[event] = [];
+ }
+ }
+ }
+
+ if (is.number(options.timeout)) {
+ options.gotTimeout = {request: options.timeout};
+ } else if (is.object(options.timeout)) {
+ options.gotTimeout = options.timeout;
+ }
+
+ delete options.timeout;
+
+ const {retry} = options;
+ options.retry = {
+ retries: 0,
+ methods: [],
+ statusCodes: [],
+ errorCodes: []
+ };
+
+ if (is.nonEmptyObject(defaults) && retry !== false) {
+ options.retry = {...defaults.retry};
+ }
+
+ if (retry !== false) {
+ if (is.number(retry)) {
+ options.retry.retries = retry;
+ } else {
+ options.retry = {...options.retry, ...retry};
+ }
+ }
+
+ if (options.gotTimeout) {
+ options.retry.maxRetryAfter = Math.min(...[options.gotTimeout.request, options.gotTimeout.connection].filter(n => !is.nullOrUndefined(n)));
+ }
+
+ if (is.array(options.retry.methods)) {
+ options.retry.methods = new Set(options.retry.methods.map(method => method.toUpperCase()));
+ }
+
+ if (is.array(options.retry.statusCodes)) {
+ options.retry.statusCodes = new Set(options.retry.statusCodes);
+ }
+
+ if (is.array(options.retry.errorCodes)) {
+ options.retry.errorCodes = new Set(options.retry.errorCodes);
+ }
+
+ return options;
+};
+
+const normalize = (url, options, defaults) => {
+ if (is.plainObject(url)) {
+ options = {...url, ...options};
+ url = options.url || {};
+ delete options.url;
+ }
+
+ if (defaults) {
+ options = merge({}, defaults.options, options ? preNormalize(options, defaults.options) : {});
+ } else {
+ options = merge({}, preNormalize(options));
+ }
+
+ if (!is.string(url) && !is.object(url)) {
+ throw new TypeError(`Parameter \`url\` must be a string or object, not ${is(url)}`);
+ }
+
+ if (is.string(url)) {
+ if (options.baseUrl) {
+ if (url.toString().startsWith('/')) {
+ url = url.toString().slice(1);
+ }
+
+ url = urlToOptions(new URL(url, options.baseUrl));
+ } else {
+ url = url.replace(/^unix:/, 'http://$&');
+ url = urlParseLax(url);
+ }
+ } else if (is(url) === 'URL') {
+ url = urlToOptions(url);
+ }
+
+ // Override both null/undefined with default protocol
+ options = merge({path: ''}, url, {protocol: url.protocol || 'https:'}, options);
+
+ for (const hook of options.hooks.init) {
+ const called = hook(options);
+
+ if (is.promise(called)) {
+ throw new TypeError('The `init` hook must be a synchronous function');
+ }
+ }
+
+ const {baseUrl} = options;
+ Object.defineProperty(options, 'baseUrl', {
+ set: () => {
+ throw new Error('Failed to set baseUrl. Options are normalized already.');
+ },
+ get: () => baseUrl
+ });
+
+ const {query} = options;
+ if (is.nonEmptyString(query) || is.nonEmptyObject(query) || query instanceof URLSearchParams) {
+ if (!is.string(query)) {
+ options.query = (new URLSearchParams(query)).toString();
+ }
+
+ options.path = `${options.path.split('?')[0]}?${options.query}`;
+ delete options.query;
+ }
+
+ if (options.hostname === 'unix') {
+ const matches = /(.+?):(.+)/.exec(options.path);
+
+ if (matches) {
+ const [, socketPath, path] = matches;
+ options = {
+ ...options,
+ socketPath,
+ path,
+ host: null
+ };
+ }
+ }
+
+ const {headers} = options;
+ for (const [key, value] of Object.entries(headers)) {
+ if (is.nullOrUndefined(value)) {
+ delete headers[key];
+ }
+ }
+
+ if (options.json && is.undefined(headers.accept)) {
+ headers.accept = 'application/json';
+ }
+
+ if (options.decompress && is.undefined(headers['accept-encoding'])) {
+ headers['accept-encoding'] = 'gzip, deflate';
+ }
+
+ const {body} = options;
+ if (is.nullOrUndefined(body)) {
+ options.method = options.method ? options.method.toUpperCase() : 'GET';
+ } else {
+ const isObject = is.object(body) && !is.buffer(body) && !is.nodeStream(body);
+ if (!is.nodeStream(body) && !is.string(body) && !is.buffer(body) && !(options.form || options.json)) {
+ throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');
+ }
+
+ if (options.json && !(isObject || is.array(body))) {
+ throw new TypeError('The `body` option must be an Object or Array when the `json` option is used');
+ }
+
+ if (options.form && !isObject) {
+ throw new TypeError('The `body` option must be an Object when the `form` option is used');
+ }
+
+ if (isFormData(body)) {
+ // Special case for https://github.com/form-data/form-data
+ headers['content-type'] = headers['content-type'] || `multipart/form-data; boundary=${body.getBoundary()}`;
+ } else if (options.form) {
+ headers['content-type'] = headers['content-type'] || 'application/x-www-form-urlencoded';
+ options.body = (new URLSearchParams(body)).toString();
+ } else if (options.json) {
+ headers['content-type'] = headers['content-type'] || 'application/json';
+ options.body = JSON.stringify(body);
+ }
+
+ options.method = options.method ? options.method.toUpperCase() : 'POST';
+ }
+
+ if (!is.function(options.retry.retries)) {
+ const {retries} = options.retry;
+
+ options.retry.retries = (iteration, error) => {
+ if (iteration > retries) {
+ return 0;
+ }
+
+ if ((!error || !options.retry.errorCodes.has(error.code)) && (!options.retry.methods.has(error.method) || !options.retry.statusCodes.has(error.statusCode))) {
+ return 0;
+ }
+
+ if (Reflect.has(error, 'headers') && Reflect.has(error.headers, 'retry-after') && retryAfterStatusCodes.has(error.statusCode)) {
+ let after = Number(error.headers['retry-after']);
+ if (is.nan(after)) {
+ after = Date.parse(error.headers['retry-after']) - Date.now();
+ } else {
+ after *= 1000;
+ }
+
+ if (after > options.retry.maxRetryAfter) {
+ return 0;
+ }
+
+ return after;
+ }
+
+ if (error.statusCode === 413) {
+ return 0;
+ }
+
+ const noise = Math.random() * 100;
+ return ((2 ** (iteration - 1)) * 1000) + noise;
+ };
+ }
+
+ return options;
+};
+
+const reNormalize = options => normalize(urlLib.format(options), options);
+
+module.exports = normalize;
+module.exports.preNormalize = preNormalize;
+module.exports.reNormalize = reNormalize;
diff --git a/node_modules/got/source/progress.js b/node_modules/got/source/progress.js
new file mode 100644
index 0000000..666abcf
--- /dev/null
+++ b/node_modules/got/source/progress.js
@@ -0,0 +1,96 @@
+'use strict';
+const {Transform} = require('stream');
+
+module.exports = {
+ download(response, emitter, downloadBodySize) {
+ let downloaded = 0;
+
+ return new Transform({
+ transform(chunk, encoding, callback) {
+ downloaded += chunk.length;
+
+ const percent = downloadBodySize ? downloaded / downloadBodySize : 0;
+
+ // Let `flush()` be responsible for emitting the last event
+ if (percent < 1) {
+ emitter.emit('downloadProgress', {
+ percent,
+ transferred: downloaded,
+ total: downloadBodySize
+ });
+ }
+
+ callback(null, chunk);
+ },
+
+ flush(callback) {
+ emitter.emit('downloadProgress', {
+ percent: 1,
+ transferred: downloaded,
+ total: downloadBodySize
+ });
+
+ callback();
+ }
+ });
+ },
+
+ upload(request, emitter, uploadBodySize) {
+ const uploadEventFrequency = 150;
+ let uploaded = 0;
+ let progressInterval;
+
+ emitter.emit('uploadProgress', {
+ percent: 0,
+ transferred: 0,
+ total: uploadBodySize
+ });
+
+ request.once('error', () => {
+ clearInterval(progressInterval);
+ });
+
+ request.once('response', () => {
+ clearInterval(progressInterval);
+
+ emitter.emit('uploadProgress', {
+ percent: 1,
+ transferred: uploaded,
+ total: uploadBodySize
+ });
+ });
+
+ request.once('socket', socket => {
+ const onSocketConnect = () => {
+ progressInterval = setInterval(() => {
+ const lastUploaded = uploaded;
+ /* istanbul ignore next: see #490 (occurs randomly!) */
+ const headersSize = request._header ? Buffer.byteLength(request._header) : 0;
+ uploaded = socket.bytesWritten - headersSize;
+
+ // Don't emit events with unchanged progress and
+ // prevent last event from being emitted, because
+ // it's emitted when `response` is emitted
+ if (uploaded === lastUploaded || uploaded === uploadBodySize) {
+ return;
+ }
+
+ emitter.emit('uploadProgress', {
+ percent: uploadBodySize ? uploaded / uploadBodySize : 0,
+ transferred: uploaded,
+ total: uploadBodySize
+ });
+ }, uploadEventFrequency);
+ };
+
+ /* istanbul ignore next: hard to test */
+ if (socket.connecting) {
+ socket.once('connect', onSocketConnect);
+ } else if (socket.writable) {
+ // The socket is being reused from pool,
+ // so the connect event will not be emitted
+ onSocketConnect();
+ }
+ });
+ }
+};
diff --git a/node_modules/got/source/request-as-event-emitter.js b/node_modules/got/source/request-as-event-emitter.js
new file mode 100644
index 0000000..79586af
--- /dev/null
+++ b/node_modules/got/source/request-as-event-emitter.js
@@ -0,0 +1,312 @@
+'use strict';
+const {URL} = require('url'); // TODO: Use the `URL` global when targeting Node.js 10
+const util = require('util');
+const EventEmitter = require('events');
+const http = require('http');
+const https = require('https');
+const urlLib = require('url');
+const CacheableRequest = require('cacheable-request');
+const toReadableStream = require('to-readable-stream');
+const is = require('@sindresorhus/is');
+const timer = require('@szmarczak/http-timer');
+const timedOut = require('./utils/timed-out');
+const getBodySize = require('./utils/get-body-size');
+const getResponse = require('./get-response');
+const progress = require('./progress');
+const {CacheError, UnsupportedProtocolError, MaxRedirectsError, RequestError, TimeoutError} = require('./errors');
+const urlToOptions = require('./utils/url-to-options');
+
// Status codes followed only when the request method is GET or HEAD
// (see the redirect check in `handleResponse` below).
const getMethodRedirectCodes = new Set([300, 301, 302, 303, 304, 305, 307, 308]);
// Status codes followed for any request method.
const allMethodRedirectCodes = new Set([300, 303, 307, 308]);
+
// Core request engine: performs an HTTP(S) request — including redirects,
// retries, cookies, caching and upload sizing — and reports everything
// through the returned `EventEmitter` ('request', 'response', 'redirect',
// 'uploadProgress'/'downloadProgress' via helpers, and 'error').
// `input` is an optional stream piped as the request body (stream API).
module.exports = (options, input) => {
	const emitter = new EventEmitter();
	const redirects = [];
	let currentRequest;
	let requestUrl;
	let redirectString;
	let uploadBodySize;
	let retryCount = 0;
	let shouldAbort = false;

	// Promisified cookie-jar callbacks; `null` when no jar was configured.
	const setCookie = options.cookieJar ? util.promisify(options.cookieJar.setCookie.bind(options.cookieJar)) : null;
	const getCookieString = options.cookieJar ? util.promisify(options.cookieJar.getCookieString.bind(options.cookieJar)) : null;
	// `options.agent` may be a per-protocol map instead of a single agent instance.
	const agents = is.object(options.agent) ? options.agent : null;

	// Pipe an error through the `beforeError` hooks, then emit the result.
	// If a hook itself throws, that error is emitted instead.
	const emitError = async error => {
		try {
			for (const hook of options.hooks.beforeError) {
				// eslint-disable-next-line no-await-in-loop
				error = await hook(error);
			}

			emitter.emit('error', error);
		} catch (error2) {
			emitter.emit('error', error2);
		}
	};

	// One request attempt; re-entered recursively for every redirect.
	const get = async options => {
		const currentUrl = redirectString || requestUrl;

		if (options.protocol !== 'http:' && options.protocol !== 'https:') {
			throw new UnsupportedProtocolError(options);
		}

		// Throws `URIError` early if the URL contains malformed percent-encoding.
		decodeURI(currentUrl);

		// Select the transport: a user-supplied `request` function, or core http/https.
		let fn;
		if (is.function(options.request)) {
			fn = {request: options.request};
		} else {
			fn = options.protocol === 'https:' ? https : http;
		}

		if (agents) {
			const protocolName = options.protocol === 'https:' ? 'https' : 'http';
			options.agent = agents[protocolName] || options.agent;
		}

		/* istanbul ignore next: electron.net is broken */
		if (options.useElectronNet && process.versions.electron) {
			const r = ({x: require})['yx'.slice(1)]; // Trick webpack
			const electron = r('electron');
			fn = electron.net || electron.remote.net;
		}

		if (options.cookieJar) {
			const cookieString = await getCookieString(currentUrl, {});

			if (is.nonEmptyString(cookieString)) {
				options.headers.cookie = cookieString;
			}
		}

		let timings;
		const handleResponse = async response => {
			try {
				/* istanbul ignore next: fixes https://github.com/electron/electron/blob/cbb460d47628a7a146adf4419ed48550a98b2923/lib/browser/api/net.js#L59-L65 */
				if (options.useElectronNet) {
					response = new Proxy(response, {
						get: (target, name) => {
							if (name === 'trailers' || name === 'rawTrailers') {
								return [];
							}

							const value = target[name];
							return is.function(value) ? value.bind(target) : value;
						}
					});
				}

				const {statusCode} = response;
				// Attach got metadata to the response for downstream consumers.
				response.url = currentUrl;
				response.requestUrl = requestUrl;
				response.retryCount = retryCount;
				response.timings = timings;
				response.redirectUrls = redirects;
				response.request = {
					gotOptions: options
				};

				const rawCookies = response.headers['set-cookie'];
				if (options.cookieJar && rawCookies) {
					await Promise.all(rawCookies.map(rawCookie => setCookie(rawCookie, response.url)));
				}

				if (options.followRedirect && 'location' in response.headers) {
					if (allMethodRedirectCodes.has(statusCode) || (getMethodRedirectCodes.has(statusCode) && (options.method === 'GET' || options.method === 'HEAD'))) {
						response.resume(); // We're being redirected, we don't care about the response.

						if (statusCode === 303) {
							// Server responded with "see other", indicating that the resource exists at another location,
							// and the client should request it from that location via GET or HEAD.
							options.method = 'GET';
						}

						// Hard limit of 10 redirects per request.
						if (redirects.length >= 10) {
							throw new MaxRedirectsError(statusCode, redirects, options);
						}

						// Handles invalid URLs. See https://github.com/sindresorhus/got/issues/604
						const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();
						const redirectURL = new URL(redirectBuffer, currentUrl);
						redirectString = redirectURL.toString();

						redirects.push(redirectString);

						const redirectOptions = {
							...options,
							...urlToOptions(redirectURL)
						};

						for (const hook of options.hooks.beforeRedirect) {
							// eslint-disable-next-line no-await-in-loop
							await hook(redirectOptions);
						}

						emitter.emit('redirect', response, redirectOptions);

						await get(redirectOptions);
						return;
					}
				}

				// Not a redirect: hand the response off for body/progress handling.
				getResponse(response, options, emitter);
			} catch (error) {
				emitError(error);
			}
		};

		// Wires up a freshly created client request object.
		const handleRequest = request => {
			if (shouldAbort) {
				// `abort()` was called before this request existed; kill it quietly.
				request.once('error', () => {});
				request.abort();
				return;
			}

			currentRequest = request;

			request.once('error', error => {
				if (request.aborted) {
					return;
				}

				if (error instanceof timedOut.TimeoutError) {
					error = new TimeoutError(error, options);
				} else {
					error = new RequestError(error, options);
				}

				// Only surface the error when the retry policy declines it.
				if (emitter.retry(error) === false) {
					emitError(error);
				}
			});

			timings = timer(request);

			progress.upload(request, emitter, uploadBodySize);

			if (options.gotTimeout) {
				timedOut(request, options.gotTimeout, options);
			}

			emitter.emit('request', request);

			const uploadComplete = () => {
				request.emit('upload-complete');
			};

			// Send the body: stream body > buffered body > piped `input` stream > empty.
			try {
				if (is.nodeStream(options.body)) {
					options.body.once('end', uploadComplete);
					options.body.pipe(request);
					options.body = undefined;
				} else if (options.body) {
					request.end(options.body, uploadComplete);
				} else if (input && (options.method === 'POST' || options.method === 'PUT' || options.method === 'PATCH')) {
					input.once('end', uploadComplete);
					input.pipe(request);
				} else {
					request.end(uploadComplete);
				}
			} catch (error) {
				emitError(new RequestError(error, options));
			}
		};

		if (options.cache) {
			const cacheableRequest = new CacheableRequest(fn.request, options.cache);
			const cacheRequest = cacheableRequest(options, handleResponse);

			cacheRequest.once('error', error => {
				if (error instanceof CacheableRequest.RequestError) {
					emitError(new RequestError(error, options));
				} else {
					emitError(new CacheError(error, options));
				}
			});

			cacheRequest.once('request', handleRequest);
		} else {
			// Catches errors thrown by calling fn.request(...)
			try {
				handleRequest(fn.request(options, handleResponse));
			} catch (error) {
				emitError(new RequestError(error, options));
			}
		}
	};

	// Ask the retry policy whether to retry after `error`.
	// Returns true when a retry was scheduled, false (or undefined on a
	// policy error) otherwise.
	emitter.retry = error => {
		let backoff;

		try {
			backoff = options.retry.retries(++retryCount, error);
		} catch (error2) {
			emitError(error2);
			return;
		}

		if (backoff) {
			const retry = async options => {
				try {
					for (const hook of options.hooks.beforeRetry) {
						// eslint-disable-next-line no-await-in-loop
						await hook(options, error, retryCount);
					}

					await get(options);
				} catch (error) {
					emitError(error);
				}
			};

			setTimeout(retry, backoff, {...options, forceRefresh: true});
			return true;
		}

		return false;
	};

	// Abort the in-flight request, or remember to abort the next one created.
	emitter.abort = () => {
		if (currentRequest) {
			currentRequest.once('error', () => {});
			currentRequest.abort();
		} else {
			shouldAbort = true;
		}
	};

	// Kick off asynchronously so callers can attach listeners first.
	setImmediate(async () => {
		try {
			// Convert buffer to stream to receive upload progress events (#322)
			const {body} = options;
			if (is.buffer(body)) {
				options.body = toReadableStream(body);
				uploadBodySize = body.length;
			} else {
				uploadBodySize = await getBodySize(options);
			}

			// Set `content-length` only when the size is known and the caller
			// supplied neither `content-length` nor `transfer-encoding`.
			if (is.undefined(options.headers['content-length']) && is.undefined(options.headers['transfer-encoding'])) {
				if ((uploadBodySize > 0 || options.method === 'PUT') && !is.null(uploadBodySize)) {
					options.headers['content-length'] = uploadBodySize;
				}
			}

			for (const hook of options.hooks.beforeRequest) {
				// eslint-disable-next-line no-await-in-loop
				await hook(options);
			}

			requestUrl = options.href || (new URL(options.path, urlLib.format(options))).toString();

			await get(options);
		} catch (error) {
			emitError(error);
		}
	});

	return emitter;
};
diff --git a/node_modules/got/source/utils/deep-freeze.js b/node_modules/got/source/utils/deep-freeze.js
new file mode 100644
index 0000000..5052b71
--- /dev/null
+++ b/node_modules/got/source/utils/deep-freeze.js
@@ -0,0 +1,12 @@
+'use strict';
+const is = require('@sindresorhus/is');
+
+module.exports = function deepFreeze(object) {
+ for (const [key, value] of Object.entries(object)) {
+ if (is.plainObject(value) || is.array(value)) {
+ deepFreeze(object[key]);
+ }
+ }
+
+ return Object.freeze(object);
+};
diff --git a/node_modules/got/source/utils/get-body-size.js b/node_modules/got/source/utils/get-body-size.js
new file mode 100644
index 0000000..0df5af2
--- /dev/null
+++ b/node_modules/got/source/utils/get-body-size.js
@@ -0,0 +1,32 @@
+'use strict';
+const fs = require('fs');
+const util = require('util');
+const is = require('@sindresorhus/is');
+const isFormData = require('./is-form-data');
+
+module.exports = async options => {
+ const {body} = options;
+
+ if (options.headers['content-length']) {
+ return Number(options.headers['content-length']);
+ }
+
+ if (!body && !options.stream) {
+ return 0;
+ }
+
+ if (is.string(body)) {
+ return Buffer.byteLength(body);
+ }
+
+ if (isFormData(body)) {
+ return util.promisify(body.getLength.bind(body))();
+ }
+
+ if (body instanceof fs.ReadStream) {
+ const {size} = await util.promisify(fs.stat)(body.path);
+ return size;
+ }
+
+ return null;
+};
diff --git a/node_modules/got/source/utils/is-form-data.js b/node_modules/got/source/utils/is-form-data.js
new file mode 100644
index 0000000..0033618
--- /dev/null
+++ b/node_modules/got/source/utils/is-form-data.js
@@ -0,0 +1,4 @@
+'use strict';
+const is = require('@sindresorhus/is');
+
+module.exports = body => is.nodeStream(body) && is.function(body.getBoundary);
diff --git a/node_modules/got/source/utils/timed-out.js b/node_modules/got/source/utils/timed-out.js
new file mode 100644
index 0000000..33611a7
--- /dev/null
+++ b/node_modules/got/source/utils/timed-out.js
@@ -0,0 +1,160 @@
+'use strict';
+const net = require('net');
+
// Error emitted when a configured timeout threshold elapses before the
// corresponding request-lifecycle event fires.
class TimeoutError extends Error {
	constructor(threshold, event) {
		super(`Timeout awaiting '${event}' for ${threshold}ms`);
		Object.assign(this, {
			name: 'TimeoutError',
			code: 'ETIMEDOUT',
			event
		});
	}
}
+
// Marker symbol set on a request once timeouts have been installed, so a
// second call to this module for the same request becomes a no-op.
const reentry = Symbol('reentry');

// Shared no-op cancel function returned by `addTimeout` after shutdown.
const noop = () => {};
+
// Install got's timeout handling on a client request. `delays` maps phase
// names (request, socket, lookup, connect, secureConnect, send, response) to
// millisecond thresholds; when a phase exceeds its threshold the request is
// aborted and a `TimeoutError` is emitted as an 'error' event.
module.exports = (request, delays, options) => {
	/* istanbul ignore next: this makes sure timed-out isn't called twice */
	if (request[reentry]) {
		return;
	}

	request[reentry] = true;

	let stopNewTimeouts = false;

	// Schedule `callback(delay, ...args)` after `delay` ms and return a cancel
	// function. (`cancelers` is declared below; it is only read when this
	// closure runs, which is after the declaration.)
	const addTimeout = (delay, callback, ...args) => {
		// An error had been thrown before. Going further would result in uncaught errors.
		// See https://github.com/sindresorhus/got/issues/631#issuecomment-435675051
		if (stopNewTimeouts) {
			return noop;
		}

		// Event loop order is timers, poll, immediates.
		// The timed event may emit during the current tick poll phase, so
		// defer calling the handler until the poll phase completes.
		let immediate;
		const timeout = setTimeout(() => {
			immediate = setImmediate(callback, delay, ...args);
			/* istanbul ignore next: added in node v9.7.0 */
			if (immediate.unref) {
				immediate.unref();
			}
		}, delay);

		/* istanbul ignore next: in order to support electron renderer */
		if (timeout.unref) {
			timeout.unref();
		}

		const cancel = () => {
			clearTimeout(timeout);
			clearImmediate(immediate);
		};

		cancelers.push(cancel);

		return cancel;
	};

	const {host, hostname} = options;

	// Emit the TimeoutError, then abort (swallowing the abort's own error).
	const timeoutHandler = (delay, event) => {
		request.emit('error', new TimeoutError(delay, event));
		request.once('error', () => {}); // Ignore the `socket hung up` error made by request.abort()

		request.abort();
	};

	const cancelers = [];
	const cancelTimeouts = () => {
		stopNewTimeouts = true;
		cancelers.forEach(cancelTimeout => cancelTimeout());
	};

	// All pending timeouts are cleared on error or once the response body ends.
	request.once('error', cancelTimeouts);
	request.once('response', response => {
		response.once('end', cancelTimeouts);
	});

	if (delays.request !== undefined) {
		addTimeout(delays.request, timeoutHandler, 'request');
	}

	if (delays.socket !== undefined) {
		const socketTimeoutHandler = () => {
			timeoutHandler(delays.socket, 'socket');
		};

		request.setTimeout(delays.socket, socketTimeoutHandler);

		// `request.setTimeout(0)` causes a memory leak.
		// We can just remove the listener and forget about the timer - it's unreffed.
		// See https://github.com/sindresorhus/got/issues/690
		cancelers.push(() => request.removeListener('timeout', socketTimeoutHandler));
	}

	// The lookup timeout only applies when a DNS lookup will actually happen
	// (not for unix sockets or literal IP addresses).
	if (delays.lookup !== undefined && !request.socketPath && !net.isIP(hostname || host)) {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
				socket.once('lookup', cancelTimeout);
			}
		});
	}

	if (delays.connect !== undefined) {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				// NOTE: `timeConnect()` is *called* while registering the listener —
				// the timer starts at that point, and the returned cancel function
				// is what runs on 'connect'.
				const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');

				if (request.socketPath || net.isIP(hostname || host)) {
					socket.once('connect', timeConnect());
				} else {
					socket.once('lookup', error => {
						if (error === null) {
							socket.once('connect', timeConnect());
						}
					});
				}
			}
		});
	}

	if (delays.secureConnect !== undefined && options.protocol === 'https:') {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				socket.once('connect', () => {
					// Timer starts on TCP connect; cancelled on TLS 'secureConnect'.
					const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
					socket.once('secureConnect', cancelTimeout);
				});
			}
		});
	}

	if (delays.send !== undefined) {
		request.once('socket', socket => {
			// As with `timeConnect` above: the timer starts when the listener is
			// registered and the returned cancel runs on 'upload-complete'.
			const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				socket.once('connect', () => {
					request.once('upload-complete', timeRequest());
				});
			} else {
				request.once('upload-complete', timeRequest());
			}
		});
	}

	if (delays.response !== undefined) {
		request.once('upload-complete', () => {
			const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
			request.once('response', cancelTimeout);
		});
	}
};

module.exports.TimeoutError = TimeoutError;
diff --git a/node_modules/got/source/utils/url-to-options.js b/node_modules/got/source/utils/url-to-options.js
new file mode 100644
index 0000000..848ef30
--- /dev/null
+++ b/node_modules/got/source/utils/url-to-options.js
@@ -0,0 +1,25 @@
+'use strict';
+const is = require('@sindresorhus/is');
+
+module.exports = url => {
+ const options = {
+ protocol: url.protocol,
+ hostname: url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
+ hash: url.hash,
+ search: url.search,
+ pathname: url.pathname,
+ href: url.href
+ };
+
+ if (is.string(url.port) && url.port.length > 0) {
+ options.port = Number(url.port);
+ }
+
+ if (url.username || url.password) {
+ options.auth = `${url.username}:${url.password}`;
+ }
+
+ options.path = is.null(url.search) ? url.pathname : `${url.pathname}${url.search}`;
+
+ return options;
+};