| field | value | date |
|---|---|---|
| author | Minteck <contact@minteck.org> | 2022-01-21 14:39:46 +0100 |
| committer | Minteck <contact@minteck.org> | 2022-01-21 14:39:46 +0100 |
| commit | 3a04cb197f152f171fba679663a91a9bb61d710e (patch) | |
| tree | 825d8f9b0daab3e434d85704144ca87492da9b92 | |
| parent | c2aa7bf38fb30de2d04f87f8e7780e4c768ae6b1 (diff) | |
| download | cobalt-3a04cb197f152f171fba679663a91a9bb61d710e.tar.gz cobalt-3a04cb197f152f171fba679663a91a9bb61d710e.tar.bz2 cobalt-3a04cb197f152f171fba679663a91a9bb61d710e.zip | |
Add config management
199 files changed, 35642 insertions, 77 deletions
@@ -1,4 +1,5 @@
 cache
 data
 data.old
-output
\ No newline at end of file
+output
+config.yml
\ No newline at end of file
diff --git a/config.default.yml b/config.default.yml
new file mode 100644
index 0000000..7c34f43
--- /dev/null
+++ b/config.default.yml
@@ -0,0 +1,74 @@
+# ------------------------------------
+#
+#    ______      __          ____
+#   / ____/___  / /_  ____ _/ / /_
+#  / /   / __ \/ __ \/ __ `/ / __/
+# / /___/ /_/ / /_/ / /_/ / / /_
+# \____/\____/_.___/\__,_/_/\__/
+#
+# ------------------------------------
+
+# A powerful, static and extensible CMS
+
+# Do you want a more user-friendly CMS?
+# You might want to check out Neutron:
+# https://gitlab.minteck.org/minteck/neutron
+
+# This is the Cobalt configuration file.
+# Make sure this file is named 'config.yml' and not
+# 'config.default.yml', as you don't need to edit
+# the default configuration file.
+
+# How are page IDs constructed?
+# -----------------------------
+# This will affect the generated HTML archive.
+# All of these methods are cryptographically secure
+#
+# WARNING: the 'sha256' method might not work due to
+# your filesystem's filename length limit. Use with
+# caution
+#
+# Allowed values:
+# - sha256: SHA-256 sum-based page IDs
+# - snowflake: filename-based page IDs (default)
+# - md5: MD5 sum-based page IDs
+pages_id_generator: snowflake
+
+# Conditional publishing
+# ----------------------
+# (thanks to antoine62 for the feature request)
+#
+# If enabled, Cobalt will detect if the 'publish'
+# parameter in your Markdown file's frontmatter
+# equals true. If (and only if) this condition is
+# met, the file will be added.
+#
+# This doesn't affect folder indexes.
+conditional_publishing: false
+
+# Classes for various HTML elements
+# ---------------------------------
+# If you import some custom CSS (such as Bootstrap),
+# you might want to add custom DOM classes to generated
+# HTML.
+#
+# Only the DOM elements mentioned below are supported by
+# Cobalt. Use custom CSS to style the others.
+classes:
+  main: []
+  footer: []
+  article: []
+  header: []
+  body: []
+  html: []
+
+# Additional JS/CSS imports
+# -------------------------
+# Add URLs to JS or CSS files that will be imported into
+# the generated HTML document.
+#
+# The JS file is imported in <head>, so you need to use
+# document.addEventListener('load', ...)
+imports:
+  css: []
+  js: []
\ No newline at end of file
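For reference, here is a minimal sketch of how this file is meant to be consumed: copy config.default.yml to config.yml on first run, then parse it with the yaml package this commit adds as a dependency. It mirrors the bootstrap code in the main-script hunks that follow; the `loadConfig` helper name is illustrative and not part of the commit.

```js
const fs = require('fs');
const YAML = require('yaml');

// Illustrative helper (not in the commit): fall back to the shipped
// defaults when the user has not created their own config.yml yet.
function loadConfig() {
    if (!fs.existsSync("./config.yml")) {
        fs.copyFileSync("./config.default.yml", "./config.yml");
    }
    return YAML.parse(fs.readFileSync("./config.yml").toString());
}

const config = loadConfig();
console.log(config.pages_id_generator); // "snowflake" unless overridden
console.log(config.classes.article);    // [] by default
```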
@@ -1,12 +1,31 @@
+global.version = "1.2";
+
+const YAML = require('yaml');
+const ejs = require('ejs');
+
 const fs = require('fs');
 const path = require('path');
-const uniqueid = require('./modules/uniqueid');
 const md = require('markdown-it')("commonmark", {
     html: true,
     linkify: true,
     typographer: true
 });
 
+const uniqueid = require('./modules/uniqueid');
+const containsPublishTag = require('./modules/publishtag');
+const removeFromSitemap = require('./modules/removesm');
+
+if (!fs.existsSync("./config.yml")) fs.copyFileSync("./config.default.yml", "./config.yml");
+if (!fs.existsSync("./data")) fs.mkdirSync("./data");
+
+if (fs.readdirSync("./data").length < 1) {
+    console.log("Your data folder is empty, add at least one Markdown file and start the rendering process again.");
+    process.exit(1);
+}
+
+global.config = YAML.parse(fs.readFileSync("./config.yml").toString());
+global.config._version = global.version;
+
 global.files = [];
 global.directories = [];
 global.sitemap = "";
@@ -31,31 +50,47 @@ function analyze(dir, levels) {
             sitemap = sitemap + "\n" + " ".repeat(levels - 1) + "* [" + file + "](./" + id + "/index.html)";
             analyze(dir + "/" + file, levels + 1);
         } else if (file.endsWith(".md")) {
-            if (path.basename(file) === "index.md") {
-                id = uniqueid(dir + "/" + file)
-                files.push({
-                    source: dir + "/" + file,
-                    dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
-                    name: file,
-                    id,
-                    parent: {
-                        path: "./index.html",
-                        name: "index"
-                    }
-                })
-            } else {
-                id = uniqueid(dir + "/" + file)
-                files.push({
-                    source: dir + "/" + file,
-                    dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html",
-                    name: file,
-                    id,
-                    parent: {
-                        path: "./" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
-                        name: path.basename(path.dirname(dir + "/" + file))
+            if (!config.conditional_publishing || containsPublishTag(dir + "/" + file)) {
+                if (path.basename(file) === "index.md") {
+                    if (dir === "./data") {
+                        id = uniqueid(dir + "/" + file)
+                        files.push({
+                            source: dir + "/" + file,
+                            dest: "./output/index.html",
+                            name: file,
+                            id,
+                            parent: {
+                                path: "./index.html",
+                                name: "index"
+                            }
+                        })
+                    } else {
+                        id = uniqueid(dir + "/" + file)
+                        files.push({
+                            source: dir + "/" + file,
+                            dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
+                            name: file,
+                            id,
+                            parent: {
+                                path: "./index.html",
+                                name: "index"
+                            }
+                        })
                     }
-                })
-                sitemap = sitemap + "\n" + " ".repeat(levels - 1) + "* [" + file.substring(0, file.length - 3) + "](./" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html)";
+                } else {
+                    id = uniqueid(dir + "/" + file)
+                    files.push({
+                        source: dir + "/" + file,
+                        dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html",
+                        name: file,
+                        id,
+                        parent: {
+                            path: "./" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
+                            name: path.basename(path.dirname(dir + "/" + file))
+                        }
+                    })
+                    sitemap = sitemap + "\n" + " ".repeat(levels - 1) + "* [" + file.substring(0, file.length - 3) + "](./" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html)";
+                }
             }
         }
     }
@@ -76,31 +111,33 @@ function aanalyze(dir, levels) {
             asitemap = asitemap + "\n" + " ".repeat(levels - 1) + "* [" + file + "](../" + id + "/index.html)";
             aanalyze(dir + "/" + file, levels + 1);
         } else if (file.endsWith(".md")) {
-            if (path.basename(file) === "index.md") {
-                id = uniqueid(dir + "/" + file)
-                afiles.push({
-                    source: dir + "/" + file,
-                    dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
-                    name: file,
-                    id,
-                    parent: {
-                        path: "./index.html",
-                        name: "index"
-                    }
-                })
-            } else {
-                id = uniqueid(dir + "/" + file)
-                afiles.push({
-                    source: dir + "/" + file,
-                    dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html",
-                    name: file,
-                    id,
-                    parent: {
-                        path: "./" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
-                        name: path.basename(path.dirname(dir + "/" + file))
-                    }
-                })
-                asitemap = asitemap + "\n" + " ".repeat(levels - 1) + "* [" + file.substring(0, file.length - 3) + "](../" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html)";
+            if (!config.conditional_publishing || containsPublishTag(dir + "/" + file)) {
+                if (path.basename(file) === "index.md") {
+                    id = uniqueid(dir + "/" + file)
+                    afiles.push({
+                        source: dir + "/" + file,
+                        dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
+                        name: file,
+                        id,
+                        parent: {
+                            path: "./index.html",
+                            name: "index"
+                        }
+                    })
+                } else {
+                    id = uniqueid(dir + "/" + file)
+                    afiles.push({
+                        source: dir + "/" + file,
+                        dest: "./output/" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html",
+                        name: file,
+                        id,
+                        parent: {
+                            path: "./" + uniqueid(path.dirname(dir + "/" + file)) + "/index.html",
+                            name: path.basename(path.dirname(dir + "/" + file))
+                        }
+                    })
+                    asitemap = asitemap + "\n" + " ".repeat(levels - 1) + "* [" + file.substring(0, file.length - 3) + "](../" + uniqueid(path.dirname(dir + "/" + file)) + "/" + id + ".html)";
+                }
             }
         }
     }
@@ -130,8 +167,8 @@ for (let dir of directories) {
 }
 
 console.log("Loading templates...");
-header = fs.readFileSync("./templates/header.html").toString();
-footer = fs.readFileSync("./templates/footer.html").toString();
+header = fs.readFileSync("./templates/header.ejs").toString();
+footer = fs.readFileSync("./templates/footer.ejs").toString();
 
 console.log("Generating sitemaps for every directory...");
 for (let dir of directories) {
@@ -150,28 +187,36 @@ for (let dir of directories) {
 console.log("Rendering pages...");
 for (let page of files) {
     content = md.render(fs.readFileSync(page.source).toString());
-    fs.writeFileSync(page.dest, header.replaceAll("$TITLE$", page.name) + "<a href=\"." + page.parent.path + "\">« " + page.parent.name + "</a><hr>" + content + footer.replaceAll("$TITLE$", page.name));
+    fs.writeFileSync(page.dest, ejs.render(header, {config, name: page.name}) + "<header class=\"" + config.classes.header.join(" ") + "\"><a href=\"." + page.parent.path + "\">« " + page.parent.name + "</a><hr></header><article class=\"" + config.classes.article.join(" ") + "\">" + content + "</article>" + ejs.render(footer, {config, name: page.name}));
 }
 
 console.log("Generating home page...");
 if (fs.existsSync("./data/index.md")) {
-    content = md.render(fs.readFileSync(page.source).toString()) + "<hr>";
+    content = md.render(fs.readFileSync("./data/index.md").toString()) + "<hr>";
 } else {
     content = "";
 }
-fs.writeFileSync("./output/index.html", header.replaceAll("$TITLE$", "Home") + "Home<hr>" + content + md.render(sitemap) + footer.replaceAll("$TITLE$", "Home"));
+fs.writeFileSync("./output/index.html", ejs.render(header, {config, name: "Home"}) + "<header class=\"" + config.classes.header.join(" ") + "\">Home<hr></header><article class=\"" + config.classes.article.join(" ") + "\">" + content + md.render(sitemap) + "</article>" + ejs.render(footer, {config, name: "Home"}));
 
 console.log("Generating missing index files...");
 for (let dir of directories) {
     if (!fs.existsSync(dir.dest + "/index.html")) {
         if (sitemaps[dir.id].trim() !== "") {
-            fs.writeFileSync(dir.dest + "/index.html", header.replaceAll("$TITLE$", dir.name) + "<a href=\"../index.html\">« index</a><hr>" + md.render(sitemaps[dir.id]) + footer.replaceAll("$TITLE$", dir.name));
+            fs.writeFileSync(dir.dest + "/index.html", ejs.render(header, {config, name: dir.name}) + "<header class=\"" + config.classes.header.join(" ") + "\"><a href=\"../index.html\">« index</a><hr></header><article class=\"" + config.classes.article.join(" ") + "\">" + md.render(sitemaps[dir.id]) + "</article>" + ejs.render(footer, {config, name: dir.name}));
         } else {
-            fs.writeFileSync(dir.dest + "/index.html", "<script>location.href = \"../index.html\";</script>");
+            removeFromSitemap(dir.id);
+            console.log("Generating home page...");
+            if (fs.existsSync("./data/index.md")) {
+                content = md.render(fs.readFileSync("./data/index.md").toString()) + "<hr>";
+            } else {
+                content = "";
+            }
+            fs.writeFileSync("./output/index.html", ejs.render(header, {config, name: "Home"}) + "<header class=\"" + config.classes.header.join(" ") + "\">Home<hr></header><article class=\"" + config.classes.article.join(" ") + "\">" + content + md.render(sitemap) + "</article>" + ejs.render(footer, {config, name: "Home"}));
+            fs.writeFileSync(dir.dest + "/index.html", ejs.render(header, {config, name: dir.name}) + "<header class=\"" + config.classes.header.join(" ") + "\"><a href=\"../index.html\">« index</a><hr></header><article class=\"" + config.classes.article.join(" ") + "\"><p><i>This folder is empty</i></p></article>" + ejs.render(footer, {config, name: dir.name}));
         }
     } else {
         if (sitemaps[dir.id].trim() !== "") {
-            fs.writeFileSync(dir.dest + "/index.html", header.replaceAll("$TITLE$", dir.name) + "<a href=\"../index.html\">« index</a><hr>" + md.render(fs.readFileSync(dir.source + "/index.md").toString()) + "<hr>" + md.render(sitemaps[dir.id]) + footer.replaceAll("$TITLE$", dir.name));
+            fs.writeFileSync(dir.dest + "/index.html", ejs.render(header, {config, name: dir.name}) + "<header class=\"" + config.classes.header.join(" ") + "\"><a href=\"../index.html\">« index</a><hr></header><article class=\"" + config.classes.article.join(" ") + "\">" + md.render(fs.readFileSync(dir.source + "/index.md").toString()) + "<hr>" + md.render(sitemaps[dir.id]) + "</article>" + ejs.render(footer, {config, name: dir.name}));
        }
    }
}
\ No newline at end of file
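The rendering code above now passes {config, name} into ejs.render for both templates instead of doing $TITLE$ string replacement. The templates/header.ejs and footer.ejs files themselves are not part of this diff, so the inline template below is only a hypothetical sketch of the kind of markup they could produce from those locals (page name, config.classes, config.imports).

```js
const ejs = require('ejs');

// Hypothetical stand-in for templates/header.ejs; the real template file is
// not included in this commit, but it receives the same locals.
const header = `<!DOCTYPE html>
<html class="<%= config.classes.html.join(' ') %>">
<head>
    <title><%= name %></title>
    <% config.imports.css.forEach(url => { %><link rel="stylesheet" href="<%= url %>"><% }) %>
    <% config.imports.js.forEach(url => { %><script src="<%= url %>"></script><% }) %>
</head>
<body class="<%= config.classes.body.join(' ') %>">`;

// Sample config shaped like config.default.yml (values made up).
const config = {
    classes: { html: [], body: ["bg-light"], header: [], article: [], main: [], footer: [] },
    imports: { css: ["https://example.com/site.css"], js: [] }
};

console.log(ejs.render(header, { config, name: "Home" }));
```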
diff --git a/modules/publishtag.js b/modules/publishtag.js
new file mode 100644
index 0000000..9828443
--- /dev/null
+++ b/modules/publishtag.js
@@ -0,0 +1,16 @@
+const fs = require('fs');
+
+module.exports = (file) => {
+    file = fs.readFileSync(file).toString();
+    try {
+        frontmatter = file.split("---")[1].trim()
+        lines = frontmatter.replaceAll("\r\n", "\n").split("\n");
+        if (lines.map(i => i.trim()).includes("publish: true") || lines.map(i => i.trim()).includes("publish : true") || lines.map(i => i.trim()).includes("publish :true")) {
+            return true;
+        } else {
+            return false;
+        }
+    } catch (e) {
+        return false;
+    }
+}
\ No newline at end of file
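The committed check only matches three exact spellings of publish: true. Since the yaml package is already being pulled in by this commit, a more tolerant variant could parse the frontmatter block instead; this is an alternative sketch, not what modules/publishtag.js actually does.

```js
const fs = require('fs');
const YAML = require('yaml');

// Alternative sketch: parse the frontmatter between the first two "---"
// markers and read the publish flag, whatever its spacing.
module.exports = (file) => {
    const text = fs.readFileSync(file).toString();
    const parts = text.split("---");
    if (parts.length < 3) return false;   // no frontmatter block at all
    try {
        const frontmatter = YAML.parse(parts[1]);
        return frontmatter !== null && frontmatter.publish === true;
    } catch (e) {
        return false;                     // malformed frontmatter
    }
};
```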
diff --git a/modules/removesm.js b/modules/removesm.js
new file mode 100644
index 0000000..1f83c02
--- /dev/null
+++ b/modules/removesm.js
@@ -0,0 +1,9 @@
+module.exports = (id) => {
+    lines = sitemap.split("\n");
+    for (let index in lines) {
+        if (lines[index].includes(id)) {
+            delete lines[index];
+        }
+    }
+    global.sitemap = lines.filter(i => i).join("\n");
+}
\ No newline at end of file
diff --git a/modules/removesubsm.js b/modules/removesubsm.js
new file mode 100644
index 0000000..0fd7eef
--- /dev/null
+++ b/modules/removesubsm.js
@@ -0,0 +1,9 @@
+module.exports = (rid, sid) => {
+    lines = sitemaps[sid].split("\n");
+    for (let index in lines) {
+        if (lines[index].includes(rid)) {
+            delete lines[index];
+        }
+    }
+    global.sitemaps[sid] = lines.filter(i => i).join("\n");
+}
\ No newline at end of file
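To see what these two helpers do, the snippet below runs equivalent pruning logic standalone against a made-up global sitemap; the entries and IDs are invented for illustration, and removesubsm.js applies the same filter to a single entry of global.sitemaps.

```js
// Equivalent to modules/removesm.js: drop every sitemap line that
// mentions the given page/directory id.
global.sitemap = [
    "* [guide](./123456789012345678/index.html)",
    "* [notes](./987654321098765432/index.html)"
].join("\n");

const removeFromSitemap = (id) => {
    global.sitemap = global.sitemap
        .split("\n")
        .filter(line => !line.includes(id))
        .join("\n");
};

removeFromSitemap("987654321098765432");
console.log(global.sitemap); // only the "guide" entry remains
```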
diff --git a/modules/uniqueid.js b/modules/uniqueid.js
index cbf4965..f0494e9 100644
--- a/modules/uniqueid.js
+++ b/modules/uniqueid.js
@@ -6,6 +6,16 @@ module.exports = function DocCMSUniqueID(file) {
     } else {
         salt = crypto.randomBytes(64).toString("hex");
     }
+
+    switch (config.pages_id_generator) {
+        case "snowflake":
+        default:
+            return parseInt(crypto.createHash("sha512").update(salt, "utf-8").digest("hex").substring(0, 16), 16).toString().substring(0, 18);
+
+        case "md5":
+            return crypto.createHash("md5").update(salt, "utf-8").digest("hex");
 
-    return parseInt(crypto.createHash("sha512").update(salt, "utf-8").digest("hex").substring(0, 16), 16).toString().substring(0, 18);
+        case "sha256":
+            return crypto.createHash("sha256").update(salt, "utf-8").digest("hex");
+    }
 }
\ No newline at end of file
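As a rough illustration of what the three pages_id_generator settings produce, the standalone snippet below applies the same hashing as the new switch to a fixed salt; the real module derives salt from the file (not shown in this hunk) or from crypto.randomBytes(64), which is omitted here.

```js
const crypto = require('crypto');

// Illustrative only: a fixed salt keeps the output stable across runs.
const salt = "data/example.md";

// snowflake (default): first 16 hex chars of a SHA-512 digest, converted to
// a decimal string and truncated to 18 digits.
const snowflake = parseInt(
    crypto.createHash("sha512").update(salt, "utf-8").digest("hex").substring(0, 16),
    16
).toString().substring(0, 18);

// md5: 32 hex characters.
const md5 = crypto.createHash("md5").update(salt, "utf-8").digest("hex");

// sha256: 64 hex characters, long enough to matter for filename length
// limits on some filesystems (hence the warning in config.default.yml).
const sha256 = crypto.createHash("sha256").update(salt, "utf-8").digest("hex");

console.log({ snowflake, md5, sha256 });
```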
The remaining hunks add vendored dependencies under node_modules/: wrapper scripts for the ejs and jake CLIs in node_modules/.bin/ (sh, .cmd and .ps1 variants), an updated node_modules/.package-lock.json recording ejs 3.1.6 and yaml 1.10.2 together with their transitive dependencies (jake, chalk, ansi-styles, color-convert, minimatch, async, filelist and related packages), and the upstream source, license and README files of those packages.
Each function consumes the return value of the function that +follows. Composing functions `f()`, `g()`, and `h()` would produce the result of +`f(g(h()))`, only this version uses callbacks to obtain the return values. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* `functions...` - the asynchronous functions to compose + + +__Example__ + +```js +function add1(n, callback) { + setTimeout(function () { + callback(null, n + 1); + }, 10); +} + +function mul3(n, callback) { + setTimeout(function () { + callback(null, n * 3); + }, 10); +} + +var add1mul3 = async.compose(mul3, add1); + +add1mul3(4, function (err, result) { + // result now equals 15 +}); +``` + +--------------------------------------- +<a name="seq" /> +### seq(fn1, fn2...) + +Version of the compose function that is more natural to read. +Each function consumes the return value of the previous function. +It is the equivalent of [`compose`](#compose) with the arguments reversed. + +Each function is executed with the `this` binding of the composed function. + +__Arguments__ + +* functions... - the asynchronous functions to compose + + +__Example__ + +```js +// Requires lodash (or underscore), express3 and dresende's orm2. +// Part of an app, that fetches cats of the logged user. +// This example uses `seq` function to avoid overnesting and error +// handling clutter. +app.get('/cats', function(request, response) { + var User = request.models.User; + async.seq( + _.bind(User.get, User), // 'User.get' has signature (id, callback(err, data)) + function(user, fn) { + user.getCats(fn); // 'getCats' has signature (callback(err, data)) + } + )(req.session.user_id, function (err, cats) { + if (err) { + console.error(err); + response.json({ status: 'error', message: err.message }); + } else { + response.json({ status: 'ok', message: 'Cats found', data: cats }); + } + }); +}); +``` + +--------------------------------------- +<a name="applyEach" /> +### applyEach(fns, args..., callback) + +Applies the provided arguments to each function in the array, calling +`callback` after all functions have completed. If you only provide the first +argument, then it will return a function which lets you pass in the +arguments as if it were a single function call. + +__Arguments__ + +* `fns` - the asynchronous functions to all call with the same arguments +* `args...` - any number of separate arguments to pass to the function +* `callback` - the final argument should be the callback, called when all + functions have completed processing + + +__Example__ + +```js +async.applyEach([enableSearch, updateSchema], 'bucket', callback); + +// partial application example: +async.each( + buckets, + async.applyEach([enableSearch, updateSchema]), + callback +); +``` + +--------------------------------------- + +<a name="applyEachSeries" /> +### applyEachSeries(arr, iterator, callback) + +The same as [`applyEach`](#applyEach) only the functions are applied in series. + +--------------------------------------- + +<a name="queue" /> +### queue(worker, concurrency) + +Creates a `queue` object with the specified `concurrency`. Tasks added to the +`queue` are processed in parallel (up to the `concurrency` limit). If all +`worker`s are in progress, the task is queued until one becomes available. +Once a `worker` completes a `task`, that `task`'s callback is called. 
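+
+If the `worker` passes an error to its callback, that error is forwarded to
+the `callback` supplied with `push` for that task. A minimal sketch (the
+`uploadFile` worker used here is hypothetical, not part of `async`):
+
+```js
+var q = async.queue(function (task, callback) {
+    // hypothetical async upload; calls callback(err) when done
+    uploadFile(task.path, callback);
+}, 4);
+
+q.push({path: '/tmp/report.csv'}, function (err) {
+    if (err) console.error('upload failed:', err);
+});
+```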
+ +__Arguments__ + +* `worker(task, callback)` - An asynchronous function for processing a queued + task, which must call its `callback(err)` argument when finished, with an + optional `error` as an argument. +* `concurrency` - An `integer` for determining how many `worker` functions should be + run in parallel. + +__Queue objects__ + +The `queue` object returned by this function has the following properties and +methods: + +* `length()` - a function returning the number of items waiting to be processed. +* `started` - a function returning whether or not any items have been pushed and processed by the queue +* `running()` - a function returning the number of items currently being processed. +* `idle()` - a function returning false if there are items waiting or being processed, or true if not. +* `concurrency` - an integer for determining how many `worker` functions should be + run in parallel. This property can be changed after a `queue` is created to + alter the concurrency on-the-fly. +* `push(task, [callback])` - add a new task to the `queue`. Calls `callback` once + the `worker` has finished processing the task. Instead of a single task, a `tasks` array + can be submitted. The respective callback is used for every task in the list. +* `unshift(task, [callback])` - add a new task to the front of the `queue`. +* `saturated` - a callback that is called when the `queue` length hits the `concurrency` limit, + and further tasks will be queued. +* `empty` - a callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - a callback that is called when the last item from the `queue` has returned from the `worker`. +* `paused` - a boolean for determining whether the queue is in a paused state +* `pause()` - a function that pauses the processing of tasks until `resume()` is called. +* `resume()` - a function that resumes the processing of queued tasks when the queue is paused. +* `kill()` - a function that removes the `drain` callback and empties remaining tasks from the queue forcing it to go idle. + +__Example__ + +```js +// create a queue object with concurrency 2 + +var q = async.queue(function (task, callback) { + console.log('hello ' + task.name); + callback(); +}, 2); + + +// assign a callback +q.drain = function() { + console.log('all items have been processed'); +} + +// add some items to the queue + +q.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +q.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); + +// add some items to the queue (batch-wise) + +q.push([{name: 'baz'},{name: 'bay'},{name: 'bax'}], function (err) { + console.log('finished processing item'); +}); + +// add some items to the front of the queue + +q.unshift({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +``` + + +--------------------------------------- + +<a name="priorityQueue" /> +### priorityQueue(worker, concurrency) + +The same as [`queue`](#queue) only tasks are assigned a priority and completed in ascending priority order. There are two differences between `queue` and `priorityQueue` objects: + +* `push(task, priority, [callback])` - `priority` should be a number. If an array of + `tasks` is given, all tasks will be assigned the same priority. +* The `unshift` method was removed. + +--------------------------------------- + +<a name="cargo" /> +### cargo(worker, [payload]) + +Creates a `cargo` object with the specified payload. 
Tasks added to the +cargo will be processed altogether (up to the `payload` limit). If the +`worker` is in progress, the task is queued until it becomes available. Once +the `worker` has completed some tasks, each callback of those tasks is called. +Check out [this animation](https://camo.githubusercontent.com/6bbd36f4cf5b35a0f11a96dcd2e97711ffc2fb37/68747470733a2f2f662e636c6f75642e6769746875622e636f6d2f6173736574732f313637363837312f36383130382f62626330636662302d356632392d313165322d393734662d3333393763363464633835382e676966) for how `cargo` and `queue` work. + +While [queue](#queue) passes only one task to one of a group of workers +at a time, cargo passes an array of tasks to a single worker, repeating +when the worker is finished. + +__Arguments__ + +* `worker(tasks, callback)` - An asynchronous function for processing an array of + queued tasks, which must call its `callback(err)` argument when finished, with + an optional `err` argument. +* `payload` - An optional `integer` for determining how many tasks should be + processed per round; if omitted, the default is unlimited. + +__Cargo objects__ + +The `cargo` object returned by this function has the following properties and +methods: + +* `length()` - A function returning the number of items waiting to be processed. +* `payload` - An `integer` for determining how many tasks should be + process per round. This property can be changed after a `cargo` is created to + alter the payload on-the-fly. +* `push(task, [callback])` - Adds `task` to the `queue`. The callback is called + once the `worker` has finished processing the task. Instead of a single task, an array of `tasks` + can be submitted. The respective callback is used for every task in the list. +* `saturated` - A callback that is called when the `queue.length()` hits the concurrency and further tasks will be queued. +* `empty` - A callback that is called when the last item from the `queue` is given to a `worker`. +* `drain` - A callback that is called when the last item from the `queue` has returned from the `worker`. + +__Example__ + +```js +// create a cargo object with payload 2 + +var cargo = async.cargo(function (tasks, callback) { + for(var i=0; i<tasks.length; i++){ + console.log('hello ' + tasks[i].name); + } + callback(); +}, 2); + + +// add some items + +cargo.push({name: 'foo'}, function (err) { + console.log('finished processing foo'); +}); +cargo.push({name: 'bar'}, function (err) { + console.log('finished processing bar'); +}); +cargo.push({name: 'baz'}, function (err) { + console.log('finished processing baz'); +}); +``` + +--------------------------------------- + +<a name="auto" /> +### auto(tasks, [callback]) + +Determines the best order for running the functions in `tasks`, based on their +requirements. Each function can optionally depend on other functions being completed +first, and each function is run as soon as its requirements are satisfied. + +If any of the functions pass an error to their callback, it will not +complete (so any other functions depending on it will not run), and the main +`callback` is immediately called with the error. Functions also receive an +object containing the results of functions which have completed so far. + +Note, all functions are called with a `results` object as a second argument, +so it is unsafe to pass functions in the `tasks` object which cannot handle the +extra argument. 
+ +For example, this snippet of code: + +```js +async.auto({ + readData: async.apply(fs.readFile, 'data.txt', 'utf-8') +}, callback); +``` + +will have the effect of calling `readFile` with the results object as the last +argument, which will fail: + +```js +fs.readFile('data.txt', 'utf-8', cb, {}); +``` + +Instead, wrap the call to `readFile` in a function which does not forward the +`results` object: + +```js +async.auto({ + readData: function(cb, results){ + fs.readFile('data.txt', 'utf-8', cb); + } +}, callback); +``` + +__Arguments__ + +* `tasks` - An object. Each of its properties is either a function or an array of + requirements, with the function itself the last item in the array. The object's key + of a property serves as the name of the task defined by that property, + i.e. can be used when specifying requirements for other tasks. + The function receives two arguments: (1) a `callback(err, result)` which must be + called when finished, passing an `error` (which can be `null`) and the result of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions. +* `callback(err, results)` - An optional callback which is called when all the + tasks have been completed. It receives the `err` argument if any `tasks` + pass an error to their callback. Results are always returned; however, if + an error occurs, no further `tasks` will be performed, and the results + object will only contain partial results. + + +__Example__ + +```js +async.auto({ + get_data: function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + make_folder: function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + }, + write_file: ['get_data', 'make_folder', function(callback, results){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + callback(null, 'filename'); + }], + email_link: ['write_file', function(callback, results){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... + // results.write_file contains the filename returned by write_file. + callback(null, {'file':results.write_file, 'email':'user@example.com'}); + }] +}, function(err, results) { + console.log('err = ', err); + console.log('results = ', results); +}); +``` + +This is a fairly trivial example, but to do this using the basic parallel and +series functions would look like this: + +```js +async.parallel([ + function(callback){ + console.log('in get_data'); + // async code to get some data + callback(null, 'data', 'converted to array'); + }, + function(callback){ + console.log('in make_folder'); + // async code to create a directory to store a file in + // this is run at the same time as getting the data + callback(null, 'folder'); + } +], +function(err, results){ + async.series([ + function(callback){ + console.log('in write_file', JSON.stringify(results)); + // once there is some data and the directory exists, + // write the data to a file in the directory + results.push('filename'); + callback(null); + }, + function(callback){ + console.log('in email_link', JSON.stringify(results)); + // once the file is written let's email a link to it... 
+ callback(null, {'file':results.pop(), 'email':'user@example.com'}); + } + ]); +}); +``` + +For a complicated series of `async` tasks, using the [`auto`](#auto) function makes adding +new tasks much easier (and the code more readable). + + +--------------------------------------- + +<a name="retry" /> +### retry([times = 5], task, [callback]) + +Attempts to get a successful response from `task` no more than `times` times before +returning an error. If the task is successful, the `callback` will be passed the result +of the successful task. If all attempts fail, the callback will be passed the error and +result (if any) of the final attempt. + +__Arguments__ + +* `times` - An integer indicating how many times to attempt the `task` before giving up. Defaults to 5. +* `task(callback, results)` - A function which receives two arguments: (1) a `callback(err, result)` + which must be called when finished, passing `err` (which can be `null`) and the `result` of + the function's execution, and (2) a `results` object, containing the results of + the previously executed functions (if nested inside another control flow). +* `callback(err, results)` - An optional callback which is called when the + task has succeeded, or after the final failed attempt. It receives the `err` and `result` arguments of the last attempt at completing the `task`. + +The [`retry`](#retry) function can be used as a stand-alone control flow by passing a +callback, as shown below: + +```js +async.retry(3, apiMethod, function(err, result) { + // do something with the result +}); +``` + +It can also be embeded within other control flow functions to retry individual methods +that are not as reliable, like this: + +```js +async.auto({ + users: api.getUsers.bind(api), + payments: async.retry(3, api.getPayments.bind(api)) +}, function(err, results) { + // do something with the results +}); +``` + + +--------------------------------------- + +<a name="iterator" /> +### iterator(tasks) + +Creates an iterator function which calls the next function in the `tasks` array, +returning a continuation to call the next one after that. It's also possible to +“peek” at the next iterator with `iterator.next()`. + +This function is used internally by the `async` module, but can be useful when +you want to manually control the flow of functions in series. + +__Arguments__ + +* `tasks` - An array of functions to run. + +__Example__ + +```js +var iterator = async.iterator([ + function(){ sys.p('one'); }, + function(){ sys.p('two'); }, + function(){ sys.p('three'); } +]); + +node> var iterator2 = iterator(); +'one' +node> var iterator3 = iterator2(); +'two' +node> iterator3(); +'three' +node> var nextfn = iterator2.next(); +node> nextfn(); +'three' +``` + +--------------------------------------- + +<a name="apply" /> +### apply(function, arguments..) + +Creates a continuation function with some arguments already applied. + +Useful as a shorthand when combined with other control flow functions. Any arguments +passed to the returned function are added to the arguments originally passed +to apply. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to automatically apply when the + continuation is called. 
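+
+Beyond the examples below, `apply` works well for seeding the first task of a
+[`waterfall`](#waterfall) with fixed arguments. A minimal sketch (the
+`'config.json'` path is just a placeholder):
+
+```js
+async.waterfall([
+    // equivalent to function(cb){ fs.readFile('config.json', 'utf8', cb); }
+    async.apply(fs.readFile, 'config.json', 'utf8'),
+    function(contents, callback) {
+        // contents is the file body read by the applied fs.readFile
+        callback(null, JSON.parse(contents));
+    }
+], function (err, config) {
+    // config now holds the parsed object, unless err is set
+});
+```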
+ +__Example__ + +```js +// using apply + +async.parallel([ + async.apply(fs.writeFile, 'testfile1', 'test1'), + async.apply(fs.writeFile, 'testfile2', 'test2'), +]); + + +// the same process without using apply + +async.parallel([ + function(callback){ + fs.writeFile('testfile1', 'test1', callback); + }, + function(callback){ + fs.writeFile('testfile2', 'test2', callback); + } +]); +``` + +It's possible to pass any number of additional arguments when calling the +continuation: + +```js +node> var fn = async.apply(sys.puts, 'one'); +node> fn('two', 'three'); +one +two +three +``` + +--------------------------------------- + +<a name="nextTick" /> +### nextTick(callback), setImmediate(callback) + +Calls `callback` on a later loop around the event loop. In Node.js this just +calls `process.nextTick`; in the browser it falls back to `setImmediate(callback)` +if available, otherwise `setTimeout(callback, 0)`, which means other higher priority +events may precede the execution of `callback`. + +This is used internally for browser-compatibility purposes. + +__Arguments__ + +* `callback` - The function to call on a later loop around the event loop. + +__Example__ + +```js +var call_order = []; +async.nextTick(function(){ + call_order.push('two'); + // call_order now equals ['one','two'] +}); +call_order.push('one') +``` + +<a name="times" /> +### times(n, callback) + +Calls the `callback` function `n` times, and accumulates results in the same manner +you would use with [`map`](#map). + +__Arguments__ + +* `n` - The number of times to run the function. +* `callback` - The function to call `n` times. + +__Example__ + +```js +// Pretend this is some complicated async factory +var createUser = function(id, callback) { + callback(null, { + id: 'user' + id + }) +} +// generate 5 users +async.times(5, function(n, next){ + createUser(n, function(err, user) { + next(err, user) + }) +}, function(err, users) { + // we should now have 5 users +}); +``` + +<a name="timesSeries" /> +### timesSeries(n, callback) + +The same as [`times`](#times), only the iterator is applied to each item in `arr` in +series. The next `iterator` is only called once the current one has completed. +The results array will be in the same order as the original. + + +## Utils + +<a name="memoize" /> +### memoize(fn, [hasher]) + +Caches the results of an `async` function. When creating a hash to store function +results against, the callback is omitted from the hash and an optional hash +function can be used. + +The cache of results is exposed as the `memo` property of the function returned +by `memoize`. + +__Arguments__ + +* `fn` - The function to proxy and cache results from. +* `hasher` - Tn optional function for generating a custom hash for storing + results. It has all the arguments applied to it apart from the callback, and + must be synchronous. + +__Example__ + +```js +var slow_fn = function (name, callback) { + // do something + callback(null, result); +}; +var fn = async.memoize(slow_fn); + +// fn can now be used as if it were slow_fn +fn('some name', function () { + // callback +}); +``` + +<a name="unmemoize" /> +### unmemoize(fn) + +Undoes a [`memoize`](#memoize)d function, reverting it to the original, unmemoized +form. Handy for testing. + +__Arguments__ + +* `fn` - the memoized function + +<a name="log" /> +### log(function, arguments) + +Logs the result of an `async` function to the `console`. Only works in Node.js or +in browsers that support `console.log` and `console.error` (such as FF and Chrome). 
+If multiple arguments are returned from the async function, `console.log` is +called on each argument in order. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to apply to the function. + +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, 'hello ' + name); + }, 1000); +}; +``` +```js +node> async.log(hello, 'world'); +'hello world' +``` + +--------------------------------------- + +<a name="dir" /> +### dir(function, arguments) + +Logs the result of an `async` function to the `console` using `console.dir` to +display the properties of the resulting object. Only works in Node.js or +in browsers that support `console.dir` and `console.error` (such as FF and Chrome). +If multiple arguments are returned from the async function, `console.dir` is +called on each argument in order. + +__Arguments__ + +* `function` - The function you want to eventually apply all arguments to. +* `arguments...` - Any number of arguments to apply to the function. + +__Example__ + +```js +var hello = function(name, callback){ + setTimeout(function(){ + callback(null, {hello: name}); + }, 1000); +}; +``` +```js +node> async.dir(hello, 'world'); +{hello: 'world'} +``` + +--------------------------------------- + +<a name="noConflict" /> +### noConflict() + +Changes the value of `async` back to its original value, returning a reference to the +`async` object. diff --git a/node_modules/async/bower.json b/node_modules/async/bower.json new file mode 100644 index 0000000..1817688 --- /dev/null +++ b/node_modules/async/bower.json @@ -0,0 +1,38 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "0.9.2", + "main": "lib/async.js", + "keywords": [ + "async", + "callback", + "utility", + "module" + ], + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "devDependencies": { + "nodeunit": ">0.0.0", + "uglify-js": "1.2.x", + "nodelint": ">0.0.0", + "lodash": ">=2.4.1" + }, + "moduleType": [ + "amd", + "globals", + "node" + ], + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ], + "authors": [ + "Caolan McMahon" + ] +}
\ No newline at end of file diff --git a/node_modules/async/component.json b/node_modules/async/component.json new file mode 100644 index 0000000..5003a7c --- /dev/null +++ b/node_modules/async/component.json @@ -0,0 +1,16 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "version": "0.9.2", + "keywords": [ + "async", + "callback", + "utility", + "module" + ], + "license": "MIT", + "repository": "caolan/async", + "scripts": [ + "lib/async.js" + ] +}
\ No newline at end of file diff --git a/node_modules/async/lib/async.js b/node_modules/async/lib/async.js new file mode 100644 index 0000000..394c41c --- /dev/null +++ b/node_modules/async/lib/async.js @@ -0,0 +1,1123 @@ +/*! + * async + * https://github.com/caolan/async + * + * Copyright 2010-2014 Caolan McMahon + * Released under the MIT license + */ +/*jshint onevar: false, indent:4 */ +/*global setImmediate: false, setTimeout: false, console: false */ +(function () { + + var async = {}; + + // global on the server, window in the browser + var root, previous_async; + + root = this; + if (root != null) { + previous_async = root.async; + } + + async.noConflict = function () { + root.async = previous_async; + return async; + }; + + function only_once(fn) { + var called = false; + return function() { + if (called) throw new Error("Callback was already called."); + called = true; + fn.apply(root, arguments); + } + } + + //// cross-browser compatiblity functions //// + + var _toString = Object.prototype.toString; + + var _isArray = Array.isArray || function (obj) { + return _toString.call(obj) === '[object Array]'; + }; + + var _each = function (arr, iterator) { + for (var i = 0; i < arr.length; i += 1) { + iterator(arr[i], i, arr); + } + }; + + var _map = function (arr, iterator) { + if (arr.map) { + return arr.map(iterator); + } + var results = []; + _each(arr, function (x, i, a) { + results.push(iterator(x, i, a)); + }); + return results; + }; + + var _reduce = function (arr, iterator, memo) { + if (arr.reduce) { + return arr.reduce(iterator, memo); + } + _each(arr, function (x, i, a) { + memo = iterator(memo, x, i, a); + }); + return memo; + }; + + var _keys = function (obj) { + if (Object.keys) { + return Object.keys(obj); + } + var keys = []; + for (var k in obj) { + if (obj.hasOwnProperty(k)) { + keys.push(k); + } + } + return keys; + }; + + //// exported async module functions //// + + //// nextTick implementation with browser-compatible fallback //// + if (typeof process === 'undefined' || !(process.nextTick)) { + if (typeof setImmediate === 'function') { + async.nextTick = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + async.setImmediate = async.nextTick; + } + else { + async.nextTick = function (fn) { + setTimeout(fn, 0); + }; + async.setImmediate = async.nextTick; + } + } + else { + async.nextTick = process.nextTick; + if (typeof setImmediate !== 'undefined') { + async.setImmediate = function (fn) { + // not a direct alias for IE10 compatibility + setImmediate(fn); + }; + } + else { + async.setImmediate = async.nextTick; + } + } + + async.each = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + _each(arr, function (x) { + iterator(x, only_once(done) ); + }); + function done(err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(); + } + } + } + }; + async.forEach = async.each; + + async.eachSeries = function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length) { + return callback(); + } + var completed = 0; + var iterate = function () { + iterator(arr[completed], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + if (completed >= arr.length) { + callback(); + } + else { + iterate(); + } + } + }); + }; + iterate(); + }; + async.forEachSeries = async.eachSeries; + + 
async.eachLimit = function (arr, limit, iterator, callback) { + var fn = _eachLimit(limit); + fn.apply(null, [arr, iterator, callback]); + }; + async.forEachLimit = async.eachLimit; + + var _eachLimit = function (limit) { + + return function (arr, iterator, callback) { + callback = callback || function () {}; + if (!arr.length || limit <= 0) { + return callback(); + } + var completed = 0; + var started = 0; + var running = 0; + + (function replenish () { + if (completed >= arr.length) { + return callback(); + } + + while (running < limit && started < arr.length) { + started += 1; + running += 1; + iterator(arr[started - 1], function (err) { + if (err) { + callback(err); + callback = function () {}; + } + else { + completed += 1; + running -= 1; + if (completed >= arr.length) { + callback(); + } + else { + replenish(); + } + } + }); + } + })(); + }; + }; + + + var doParallel = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.each].concat(args)); + }; + }; + var doParallelLimit = function(limit, fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [_eachLimit(limit)].concat(args)); + }; + }; + var doSeries = function (fn) { + return function () { + var args = Array.prototype.slice.call(arguments); + return fn.apply(null, [async.eachSeries].concat(args)); + }; + }; + + + var _asyncMap = function (eachfn, arr, iterator, callback) { + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + if (!callback) { + eachfn(arr, function (x, callback) { + iterator(x.value, function (err) { + callback(err); + }); + }); + } else { + var results = []; + eachfn(arr, function (x, callback) { + iterator(x.value, function (err, v) { + results[x.index] = v; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + async.map = doParallel(_asyncMap); + async.mapSeries = doSeries(_asyncMap); + async.mapLimit = function (arr, limit, iterator, callback) { + return _mapLimit(limit)(arr, iterator, callback); + }; + + var _mapLimit = function(limit) { + return doParallelLimit(limit, _asyncMap); + }; + + // reduce only has a series version, as doing reduce in parallel won't + // work in many situations. 
+ async.reduce = function (arr, memo, iterator, callback) { + async.eachSeries(arr, function (x, callback) { + iterator(memo, x, function (err, v) { + memo = v; + callback(err); + }); + }, function (err) { + callback(err, memo); + }); + }; + // inject alias + async.inject = async.reduce; + // foldl alias + async.foldl = async.reduce; + + async.reduceRight = function (arr, memo, iterator, callback) { + var reversed = _map(arr, function (x) { + return x; + }).reverse(); + async.reduce(reversed, memo, iterator, callback); + }; + // foldr alias + async.foldr = async.reduceRight; + + var _filter = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.filter = doParallel(_filter); + async.filterSeries = doSeries(_filter); + // select alias + async.select = async.filter; + async.selectSeries = async.filterSeries; + + var _reject = function (eachfn, arr, iterator, callback) { + var results = []; + arr = _map(arr, function (x, i) { + return {index: i, value: x}; + }); + eachfn(arr, function (x, callback) { + iterator(x.value, function (v) { + if (!v) { + results.push(x); + } + callback(); + }); + }, function (err) { + callback(_map(results.sort(function (a, b) { + return a.index - b.index; + }), function (x) { + return x.value; + })); + }); + }; + async.reject = doParallel(_reject); + async.rejectSeries = doSeries(_reject); + + var _detect = function (eachfn, arr, iterator, main_callback) { + eachfn(arr, function (x, callback) { + iterator(x, function (result) { + if (result) { + main_callback(x); + main_callback = function () {}; + } + else { + callback(); + } + }); + }, function (err) { + main_callback(); + }); + }; + async.detect = doParallel(_detect); + async.detectSeries = doSeries(_detect); + + async.some = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (v) { + main_callback(true); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(false); + }); + }; + // any alias + async.any = async.some; + + async.every = function (arr, iterator, main_callback) { + async.each(arr, function (x, callback) { + iterator(x, function (v) { + if (!v) { + main_callback(false); + main_callback = function () {}; + } + callback(); + }); + }, function (err) { + main_callback(true); + }); + }; + // all alias + async.all = async.every; + + async.sortBy = function (arr, iterator, callback) { + async.map(arr, function (x, callback) { + iterator(x, function (err, criteria) { + if (err) { + callback(err); + } + else { + callback(null, {value: x, criteria: criteria}); + } + }); + }, function (err, results) { + if (err) { + return callback(err); + } + else { + var fn = function (left, right) { + var a = left.criteria, b = right.criteria; + return a < b ? -1 : a > b ? 
1 : 0; + }; + callback(null, _map(results.sort(fn), function (x) { + return x.value; + })); + } + }); + }; + + async.auto = function (tasks, callback) { + callback = callback || function () {}; + var keys = _keys(tasks); + var remainingTasks = keys.length + if (!remainingTasks) { + return callback(); + } + + var results = {}; + + var listeners = []; + var addListener = function (fn) { + listeners.unshift(fn); + }; + var removeListener = function (fn) { + for (var i = 0; i < listeners.length; i += 1) { + if (listeners[i] === fn) { + listeners.splice(i, 1); + return; + } + } + }; + var taskComplete = function () { + remainingTasks-- + _each(listeners.slice(0), function (fn) { + fn(); + }); + }; + + addListener(function () { + if (!remainingTasks) { + var theCallback = callback; + // prevent final callback from calling itself if it errors + callback = function () {}; + + theCallback(null, results); + } + }); + + _each(keys, function (k) { + var task = _isArray(tasks[k]) ? tasks[k]: [tasks[k]]; + var taskCallback = function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + if (err) { + var safeResults = {}; + _each(_keys(results), function(rkey) { + safeResults[rkey] = results[rkey]; + }); + safeResults[k] = args; + callback(err, safeResults); + // stop subsequent errors hitting callback multiple times + callback = function () {}; + } + else { + results[k] = args; + async.setImmediate(taskComplete); + } + }; + var requires = task.slice(0, Math.abs(task.length - 1)) || []; + var ready = function () { + return _reduce(requires, function (a, x) { + return (a && results.hasOwnProperty(x)); + }, true) && !results.hasOwnProperty(k); + }; + if (ready()) { + task[task.length - 1](taskCallback, results); + } + else { + var listener = function () { + if (ready()) { + removeListener(listener); + task[task.length - 1](taskCallback, results); + } + }; + addListener(listener); + } + }); + }; + + async.retry = function(times, task, callback) { + var DEFAULT_TIMES = 5; + var attempts = []; + // Use defaults if times not passed + if (typeof times === 'function') { + callback = task; + task = times; + times = DEFAULT_TIMES; + } + // Make sure times is a number + times = parseInt(times, 10) || DEFAULT_TIMES; + var wrappedTask = function(wrappedCallback, wrappedResults) { + var retryAttempt = function(task, finalAttempt) { + return function(seriesCallback) { + task(function(err, result){ + seriesCallback(!err || finalAttempt, {err: err, result: result}); + }, wrappedResults); + }; + }; + while (times) { + attempts.push(retryAttempt(task, !(times-=1))); + } + async.series(attempts, function(done, data){ + data = data[data.length - 1]; + (wrappedCallback || callback)(data.err, data.result); + }); + } + // If a callback is passed, run this as a controll flow + return callback ? 
wrappedTask() : wrappedTask + }; + + async.waterfall = function (tasks, callback) { + callback = callback || function () {}; + if (!_isArray(tasks)) { + var err = new Error('First argument to waterfall must be an array of functions'); + return callback(err); + } + if (!tasks.length) { + return callback(); + } + var wrapIterator = function (iterator) { + return function (err) { + if (err) { + callback.apply(null, arguments); + callback = function () {}; + } + else { + var args = Array.prototype.slice.call(arguments, 1); + var next = iterator.next(); + if (next) { + args.push(wrapIterator(next)); + } + else { + args.push(callback); + } + async.setImmediate(function () { + iterator.apply(null, args); + }); + } + }; + }; + wrapIterator(async.iterator(tasks))(); + }; + + var _parallel = function(eachfn, tasks, callback) { + callback = callback || function () {}; + if (_isArray(tasks)) { + eachfn.map(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + eachfn.each(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.parallel = function (tasks, callback) { + _parallel({ map: async.map, each: async.each }, tasks, callback); + }; + + async.parallelLimit = function(tasks, limit, callback) { + _parallel({ map: _mapLimit(limit), each: _eachLimit(limit) }, tasks, callback); + }; + + async.series = function (tasks, callback) { + callback = callback || function () {}; + if (_isArray(tasks)) { + async.mapSeries(tasks, function (fn, callback) { + if (fn) { + fn(function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + callback.call(null, err, args); + }); + } + }, callback); + } + else { + var results = {}; + async.eachSeries(_keys(tasks), function (k, callback) { + tasks[k](function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (args.length <= 1) { + args = args[0]; + } + results[k] = args; + callback(err); + }); + }, function (err) { + callback(err, results); + }); + } + }; + + async.iterator = function (tasks) { + var makeCallback = function (index) { + var fn = function () { + if (tasks.length) { + tasks[index].apply(null, arguments); + } + return fn.next(); + }; + fn.next = function () { + return (index < tasks.length - 1) ? 
makeCallback(index + 1): null; + }; + return fn; + }; + return makeCallback(0); + }; + + async.apply = function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + return function () { + return fn.apply( + null, args.concat(Array.prototype.slice.call(arguments)) + ); + }; + }; + + var _concat = function (eachfn, arr, fn, callback) { + var r = []; + eachfn(arr, function (x, cb) { + fn(x, function (err, y) { + r = r.concat(y || []); + cb(err); + }); + }, function (err) { + callback(err, r); + }); + }; + async.concat = doParallel(_concat); + async.concatSeries = doSeries(_concat); + + async.whilst = function (test, iterator, callback) { + if (test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.whilst(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doWhilst = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + var args = Array.prototype.slice.call(arguments, 1); + if (test.apply(null, args)) { + async.doWhilst(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.until = function (test, iterator, callback) { + if (!test()) { + iterator(function (err) { + if (err) { + return callback(err); + } + async.until(test, iterator, callback); + }); + } + else { + callback(); + } + }; + + async.doUntil = function (iterator, test, callback) { + iterator(function (err) { + if (err) { + return callback(err); + } + var args = Array.prototype.slice.call(arguments, 1); + if (!test.apply(null, args)) { + async.doUntil(iterator, test, callback); + } + else { + callback(); + } + }); + }; + + async.queue = function (worker, concurrency) { + if (concurrency === undefined) { + concurrency = 1; + } + function _insert(q, data, pos, callback) { + if (!q.started){ + q.started = true; + } + if (!_isArray(data)) { + data = [data]; + } + if(data.length == 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + if (q.drain) { + q.drain(); + } + }); + } + _each(data, function(task) { + var item = { + data: task, + callback: typeof callback === 'function' ? 
callback : null + }; + + if (pos) { + q.tasks.unshift(item); + } else { + q.tasks.push(item); + } + + if (q.saturated && q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + var workers = 0; + var q = { + tasks: [], + concurrency: concurrency, + saturated: null, + empty: null, + drain: null, + started: false, + paused: false, + push: function (data, callback) { + _insert(q, data, false, callback); + }, + kill: function () { + q.drain = null; + q.tasks = []; + }, + unshift: function (data, callback) { + _insert(q, data, true, callback); + }, + process: function () { + if (!q.paused && workers < q.concurrency && q.tasks.length) { + var task = q.tasks.shift(); + if (q.empty && q.tasks.length === 0) { + q.empty(); + } + workers += 1; + var next = function () { + workers -= 1; + if (task.callback) { + task.callback.apply(task, arguments); + } + if (q.drain && q.tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + var cb = only_once(next); + worker(task.data, cb); + } + }, + length: function () { + return q.tasks.length; + }, + running: function () { + return workers; + }, + idle: function() { + return q.tasks.length + workers === 0; + }, + pause: function () { + if (q.paused === true) { return; } + q.paused = true; + }, + resume: function () { + if (q.paused === false) { return; } + q.paused = false; + // Need to call q.process once per concurrent + // worker to preserve full concurrency after pause + for (var w = 1; w <= q.concurrency; w++) { + async.setImmediate(q.process); + } + } + }; + return q; + }; + + async.priorityQueue = function (worker, concurrency) { + + function _compareTasks(a, b){ + return a.priority - b.priority; + }; + + function _binarySearch(sequence, item, compare) { + var beg = -1, + end = sequence.length - 1; + while (beg < end) { + var mid = beg + ((end - beg + 1) >>> 1); + if (compare(item, sequence[mid]) >= 0) { + beg = mid; + } else { + end = mid - 1; + } + } + return beg; + } + + function _insert(q, data, priority, callback) { + if (!q.started){ + q.started = true; + } + if (!_isArray(data)) { + data = [data]; + } + if(data.length == 0) { + // call drain immediately if there are no tasks + return async.setImmediate(function() { + if (q.drain) { + q.drain(); + } + }); + } + _each(data, function(task) { + var item = { + data: task, + priority: priority, + callback: typeof callback === 'function' ? callback : null + }; + + q.tasks.splice(_binarySearch(q.tasks, item, _compareTasks) + 1, 0, item); + + if (q.saturated && q.tasks.length === q.concurrency) { + q.saturated(); + } + async.setImmediate(q.process); + }); + } + + // Start with a normal queue + var q = async.queue(worker, concurrency); + + // Override push to accept second parameter representing priority + q.push = function (data, priority, callback) { + _insert(q, data, priority, callback); + }; + + // Remove unshift function + delete q.unshift; + + return q; + }; + + async.cargo = function (worker, payload) { + var working = false, + tasks = []; + + var cargo = { + tasks: tasks, + payload: payload, + saturated: null, + empty: null, + drain: null, + drained: true, + push: function (data, callback) { + if (!_isArray(data)) { + data = [data]; + } + _each(data, function(task) { + tasks.push({ + data: task, + callback: typeof callback === 'function' ? 
callback : null + }); + cargo.drained = false; + if (cargo.saturated && tasks.length === payload) { + cargo.saturated(); + } + }); + async.setImmediate(cargo.process); + }, + process: function process() { + if (working) return; + if (tasks.length === 0) { + if(cargo.drain && !cargo.drained) cargo.drain(); + cargo.drained = true; + return; + } + + var ts = typeof payload === 'number' + ? tasks.splice(0, payload) + : tasks.splice(0, tasks.length); + + var ds = _map(ts, function (task) { + return task.data; + }); + + if(cargo.empty) cargo.empty(); + working = true; + worker(ds, function () { + working = false; + + var args = arguments; + _each(ts, function (data) { + if (data.callback) { + data.callback.apply(null, args); + } + }); + + process(); + }); + }, + length: function () { + return tasks.length; + }, + running: function () { + return working; + } + }; + return cargo; + }; + + var _console_fn = function (name) { + return function (fn) { + var args = Array.prototype.slice.call(arguments, 1); + fn.apply(null, args.concat([function (err) { + var args = Array.prototype.slice.call(arguments, 1); + if (typeof console !== 'undefined') { + if (err) { + if (console.error) { + console.error(err); + } + } + else if (console[name]) { + _each(args, function (x) { + console[name](x); + }); + } + } + }])); + }; + }; + async.log = _console_fn('log'); + async.dir = _console_fn('dir'); + /*async.info = _console_fn('info'); + async.warn = _console_fn('warn'); + async.error = _console_fn('error');*/ + + async.memoize = function (fn, hasher) { + var memo = {}; + var queues = {}; + hasher = hasher || function (x) { + return x; + }; + var memoized = function () { + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + var key = hasher.apply(null, args); + if (key in memo) { + async.nextTick(function () { + callback.apply(null, memo[key]); + }); + } + else if (key in queues) { + queues[key].push(callback); + } + else { + queues[key] = [callback]; + fn.apply(null, args.concat([function () { + memo[key] = arguments; + var q = queues[key]; + delete queues[key]; + for (var i = 0, l = q.length; i < l; i++) { + q[i].apply(null, arguments); + } + }])); + } + }; + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + }; + + async.unmemoize = function (fn) { + return function () { + return (fn.unmemoized || fn).apply(null, arguments); + }; + }; + + async.times = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.map(counter, iterator, callback); + }; + + async.timesSeries = function (count, iterator, callback) { + var counter = []; + for (var i = 0; i < count; i++) { + counter.push(i); + } + return async.mapSeries(counter, iterator, callback); + }; + + async.seq = function (/* functions... */) { + var fns = arguments; + return function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + async.reduce(fns, args, function (newargs, fn, cb) { + fn.apply(that, newargs.concat([function () { + var err = arguments[0]; + var nextargs = Array.prototype.slice.call(arguments, 1); + cb(err, nextargs); + }])) + }, + function (err, results) { + callback.apply(that, [err].concat(results)); + }); + }; + }; + + async.compose = function (/* functions... 
*/) { + return async.seq.apply(null, Array.prototype.reverse.call(arguments)); + }; + + var _applyEach = function (eachfn, fns /*args...*/) { + var go = function () { + var that = this; + var args = Array.prototype.slice.call(arguments); + var callback = args.pop(); + return eachfn(fns, function (fn, cb) { + fn.apply(that, args.concat([cb])); + }, + callback); + }; + if (arguments.length > 2) { + var args = Array.prototype.slice.call(arguments, 2); + return go.apply(this, args); + } + else { + return go; + } + }; + async.applyEach = doParallel(_applyEach); + async.applyEachSeries = doSeries(_applyEach); + + async.forever = function (fn, callback) { + function next(err) { + if (err) { + if (callback) { + return callback(err); + } + throw err; + } + fn(next); + } + next(); + }; + + // Node.js + if (typeof module !== 'undefined' && module.exports) { + module.exports = async; + } + // AMD / RequireJS + else if (typeof define !== 'undefined' && define.amd) { + define([], function () { + return async; + }); + } + // included directly via <script> tag + else { + root.async = async; + } + +}()); diff --git a/node_modules/async/package.json b/node_modules/async/package.json new file mode 100644 index 0000000..1424c76 --- /dev/null +++ b/node_modules/async/package.json @@ -0,0 +1,54 @@ +{ + "name": "async", + "description": "Higher-order functions and common patterns for asynchronous code", + "main": "lib/async.js", + "author": "Caolan McMahon", + "version": "0.9.2", + "keywords": [ + "async", + "callback", + "utility", + "module" + ], + "repository": { + "type": "git", + "url": "https://github.com/caolan/async.git" + }, + "bugs": { + "url": "https://github.com/caolan/async/issues" + }, + "license": "MIT", + "devDependencies": { + "nodeunit": ">0.0.0", + "uglify-js": "1.2.x", + "nodelint": ">0.0.0", + "lodash": ">=2.4.1" + }, + "jam": { + "main": "lib/async.js", + "include": [ + "lib/async.js", + "README.md", + "LICENSE" + ], + "categories": [ + "Utilities" + ] + }, + "scripts": { + "test": "nodeunit test/test-async.js" + }, + "spm": { + "main": "lib/async.js" + }, + "volo": { + "main": "lib/async.js", + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ] + } +}
\ No newline at end of file diff --git a/node_modules/async/support/sync-package-managers.js b/node_modules/async/support/sync-package-managers.js new file mode 100644 index 0000000..30cb7c2 --- /dev/null +++ b/node_modules/async/support/sync-package-managers.js @@ -0,0 +1,53 @@ +#!/usr/bin/env node + +// This should probably be its own module but complaints about bower/etc. +// support keep coming up and I'd rather just enable the workflow here for now +// and figure out where this should live later. -- @beaugunderson + +var fs = require('fs'); +var _ = require('lodash'); + +var packageJson = require('../package.json'); + +var IGNORES = ['**/.*', 'node_modules', 'bower_components', 'test', 'tests']; +var INCLUDES = ['lib/async.js', 'README.md', 'LICENSE']; +var REPOSITORY_NAME = 'caolan/async'; + +packageJson.jam = { + main: packageJson.main, + include: INCLUDES, + categories: ['Utilities'] +}; + +packageJson.spm = { + main: packageJson.main +}; + +packageJson.volo = { + main: packageJson.main, + ignore: IGNORES +}; + +var bowerSpecific = { + moduleType: ['amd', 'globals', 'node'], + ignore: IGNORES, + authors: [packageJson.author] +}; + +var bowerInclude = ['name', 'description', 'version', 'main', 'keywords', + 'license', 'homepage', 'repository', 'devDependencies']; + +var componentSpecific = { + repository: REPOSITORY_NAME, + scripts: [packageJson.main] +}; + +var componentInclude = ['name', 'description', 'version', 'keywords', + 'license']; + +var bowerJson = _.merge({}, _.pick(packageJson, bowerInclude), bowerSpecific); +var componentJson = _.merge({}, _.pick(packageJson, componentInclude), componentSpecific); + +fs.writeFileSync('./bower.json', JSON.stringify(bowerJson, null, 2)); +fs.writeFileSync('./component.json', JSON.stringify(componentJson, null, 2)); +fs.writeFileSync('./package.json', JSON.stringify(packageJson, null, 2)); diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ <a index>, <b index> ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright 
notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. + +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. + +```js +/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! 
+ +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..a18faa8 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/chalk/index.js b/node_modules/chalk/index.js new file mode 100644 index 0000000..1cc5fa8 --- /dev/null +++ b/node_modules/chalk/index.js @@ -0,0 +1,228 @@ +'use strict'; +const escapeStringRegexp = require('escape-string-regexp'); +const ansiStyles = require('ansi-styles'); +const stdoutColor = require('supports-color').stdout; + +const template = require('./templates.js'); + +const isSimpleWindowsTerm = process.platform === 'win32' && !(process.env.TERM || '').toLowerCase().startsWith('xterm'); + +// `supportsColor.level` → `ansiStyles.color[name]` mapping +const levelMapping = ['ansi', 'ansi', 'ansi256', 'ansi16m']; + +// `color-convert` models to exclude from the Chalk API due to conflicts and such +const skipModels = new Set(['gray']); + +const styles = Object.create(null); + +function applyOptions(obj, options) { + options = options || {}; + + // Detect level if not set manually + const scLevel = stdoutColor ? stdoutColor.level : 0; + obj.level = options.level === undefined ? scLevel : options.level; + obj.enabled = 'enabled' in options ? 
options.enabled : obj.level > 0; +} + +function Chalk(options) { + // We check for this.template here since calling `chalk.constructor()` + // by itself will have a `this` of a previously constructed chalk object + if (!this || !(this instanceof Chalk) || this.template) { + const chalk = {}; + applyOptions(chalk, options); + + chalk.template = function () { + const args = [].slice.call(arguments); + return chalkTag.apply(null, [chalk.template].concat(args)); + }; + + Object.setPrototypeOf(chalk, Chalk.prototype); + Object.setPrototypeOf(chalk.template, chalk); + + chalk.template.constructor = Chalk; + + return chalk.template; + } + + applyOptions(this, options); +} + +// Use bright blue on Windows as the normal blue color is illegible +if (isSimpleWindowsTerm) { + ansiStyles.blue.open = '\u001B[94m'; +} + +for (const key of Object.keys(ansiStyles)) { + ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g'); + + styles[key] = { + get() { + const codes = ansiStyles[key]; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key); + } + }; +} + +styles.visible = { + get() { + return build.call(this, this._styles || [], true, 'visible'); + } +}; + +ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g'); +for (const model of Object.keys(ansiStyles.color.ansi)) { + if (skipModels.has(model)) { + continue; + } + + styles[model] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.color.close, + closeRe: ansiStyles.color.closeRe + }; + return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g'); +for (const model of Object.keys(ansiStyles.bgColor.ansi)) { + if (skipModels.has(model)) { + continue; + } + + const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1); + styles[bgModel] = { + get() { + const level = this.level; + return function () { + const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments); + const codes = { + open, + close: ansiStyles.bgColor.close, + closeRe: ansiStyles.bgColor.closeRe + }; + return build.call(this, this._styles ? 
this._styles.concat(codes) : [codes], this._empty, model); + }; + } + }; +} + +const proto = Object.defineProperties(() => {}, styles); + +function build(_styles, _empty, key) { + const builder = function () { + return applyStyle.apply(builder, arguments); + }; + + builder._styles = _styles; + builder._empty = _empty; + + const self = this; + + Object.defineProperty(builder, 'level', { + enumerable: true, + get() { + return self.level; + }, + set(level) { + self.level = level; + } + }); + + Object.defineProperty(builder, 'enabled', { + enumerable: true, + get() { + return self.enabled; + }, + set(enabled) { + self.enabled = enabled; + } + }); + + // See below for fix regarding invisible grey/dim combination on Windows + builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey'; + + // `__proto__` is used because we must return a function, but there is + // no way to create a function with a different prototype + builder.__proto__ = proto; // eslint-disable-line no-proto + + return builder; +} + +function applyStyle() { + // Support varags, but simply cast to string in case there's only one arg + const args = arguments; + const argsLen = args.length; + let str = String(arguments[0]); + + if (argsLen === 0) { + return ''; + } + + if (argsLen > 1) { + // Don't slice `arguments`, it prevents V8 optimizations + for (let a = 1; a < argsLen; a++) { + str += ' ' + args[a]; + } + } + + if (!this.enabled || this.level <= 0 || !str) { + return this._empty ? '' : str; + } + + // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe, + // see https://github.com/chalk/chalk/issues/58 + // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop. + const originalDim = ansiStyles.dim.open; + if (isSimpleWindowsTerm && this.hasGrey) { + ansiStyles.dim.open = ''; + } + + for (const code of this._styles.slice().reverse()) { + // Replace any instances already present with a re-opening code + // otherwise only the part of the string until said closing code + // will be colored, and the rest will simply be 'plain'. + str = code.open + str.replace(code.closeRe, code.open) + code.close; + + // Close the styling before a linebreak and reopen + // after next line to fix a bleed issue on macOS + // https://github.com/chalk/chalk/pull/92 + str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`); + } + + // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue + ansiStyles.dim.open = originalDim; + + return str; +} + +function chalkTag(chalk, strings) { + if (!Array.isArray(strings)) { + // If chalk() was called by itself or with a string, + // return the string itself as a string. 
+ return [].slice.call(arguments, 1).join(' '); + } + + const args = [].slice.call(arguments, 2); + const parts = [strings.raw[0]]; + + for (let i = 1; i < strings.length; i++) { + parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&')); + parts.push(String(strings.raw[i])); + } + + return template(chalk, parts.join('')); +} + +Object.defineProperties(Chalk.prototype, styles); + +module.exports = Chalk(); // eslint-disable-line new-cap +module.exports.supportsColor = stdoutColor; +module.exports.default = module.exports; // For TypeScript diff --git a/node_modules/chalk/index.js.flow b/node_modules/chalk/index.js.flow new file mode 100644 index 0000000..622caaa --- /dev/null +++ b/node_modules/chalk/index.js.flow @@ -0,0 +1,93 @@ +// @flow strict + +type TemplateStringsArray = $ReadOnlyArray<string>; + +export type Level = $Values<{ + None: 0, + Basic: 1, + Ansi256: 2, + TrueColor: 3 +}>; + +export type ChalkOptions = {| + enabled?: boolean, + level?: Level +|}; + +export type ColorSupport = {| + level: Level, + hasBasic: boolean, + has256: boolean, + has16m: boolean +|}; + +export interface Chalk { + (...text: string[]): string, + (text: TemplateStringsArray, ...placeholders: string[]): string, + constructor(options?: ChalkOptions): Chalk, + enabled: boolean, + level: Level, + rgb(r: number, g: number, b: number): Chalk, + hsl(h: number, s: number, l: number): Chalk, + hsv(h: number, s: number, v: number): Chalk, + hwb(h: number, w: number, b: number): Chalk, + bgHex(color: string): Chalk, + bgKeyword(color: string): Chalk, + bgRgb(r: number, g: number, b: number): Chalk, + bgHsl(h: number, s: number, l: number): Chalk, + bgHsv(h: number, s: number, v: number): Chalk, + bgHwb(h: number, w: number, b: number): Chalk, + hex(color: string): Chalk, + keyword(color: string): Chalk, + + +reset: Chalk, + +bold: Chalk, + +dim: Chalk, + +italic: Chalk, + +underline: Chalk, + +inverse: Chalk, + +hidden: Chalk, + +strikethrough: Chalk, + + +visible: Chalk, + + +black: Chalk, + +red: Chalk, + +green: Chalk, + +yellow: Chalk, + +blue: Chalk, + +magenta: Chalk, + +cyan: Chalk, + +white: Chalk, + +gray: Chalk, + +grey: Chalk, + +blackBright: Chalk, + +redBright: Chalk, + +greenBright: Chalk, + +yellowBright: Chalk, + +blueBright: Chalk, + +magentaBright: Chalk, + +cyanBright: Chalk, + +whiteBright: Chalk, + + +bgBlack: Chalk, + +bgRed: Chalk, + +bgGreen: Chalk, + +bgYellow: Chalk, + +bgBlue: Chalk, + +bgMagenta: Chalk, + +bgCyan: Chalk, + +bgWhite: Chalk, + +bgBlackBright: Chalk, + +bgRedBright: Chalk, + +bgGreenBright: Chalk, + +bgYellowBright: Chalk, + +bgBlueBright: Chalk, + +bgMagentaBright: Chalk, + +bgCyanBright: Chalk, + +bgWhiteBrigh: Chalk, + + supportsColor: ColorSupport +}; + +declare module.exports: Chalk; diff --git a/node_modules/chalk/license b/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or 
substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json new file mode 100644 index 0000000..bc32468 --- /dev/null +++ b/node_modules/chalk/package.json @@ -0,0 +1,71 @@ +{ + "name": "chalk", + "version": "2.4.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && tsc --project types && flow --max-warnings=0 && nyc ava", + "bench": "matcha benchmark.js", + "coveralls": "nyc report --reporter=text-lcov | coveralls" + }, + "files": [ + "index.js", + "templates.js", + "types/index.d.ts", + "index.js.flow" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "devDependencies": { + "ava": "*", + "coveralls": "^3.0.0", + "execa": "^0.9.0", + "flow-bin": "^0.68.0", + "import-fresh": "^2.0.0", + "matcha": "^0.7.0", + "nyc": "^11.0.2", + "resolve-from": "^4.0.0", + "typescript": "^2.5.3", + "xo": "*" + }, + "types": "types/index.d.ts", + "xo": { + "envs": [ + "node", + "mocha" + ], + "ignores": [ + "test/_flow.js" + ] + } +} diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md new file mode 100644 index 0000000..d298e2c --- /dev/null +++ b/node_modules/chalk/readme.md @@ -0,0 +1,314 @@ +<h1 align="center"> + <br> + <br> + <img width="320" src="media/logo.svg" alt="Chalk"> + <br> + <br> + <br> +</h1> + +> Terminal string styling done right + +[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) [![Mentioned in Awesome Node.js](https://awesome.re/mentioned-badge.svg)](https://github.com/sindresorhus/awesome-nodejs) + +### [See what's new in Chalk 2](https://github.com/chalk/chalk/releases/tag/v2.0.0) + +<img src="https://cdn.rawgit.com/chalk/ansi-styles/8261697c95bf34b6c7767e2cbe9941a851d59385/screenshot.svg" alt="" width="900"> + + +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~23,000 packages](https://www.npmjs.com/browse/depended/chalk) as of December 31, 2017 + + +## Install + +```console +$ npm install chalk +``` + +<a href="https://www.patreon.com/sindresorhus"> + <img 
src="https://c5.patreon.com/external/logo/become_a_patron_button@2x.png" width="160"> +</a> + + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + + +## API + +### chalk.`<style>[.<style>...](string, [string...])` + +Example: `chalk.red.bold.underline('Hello', 'world');` + +Chain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter, and later styles take precedent in case of a conflict. This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`. + +Multiple arguments will be separated by space. + +### chalk.enabled + +Color support is automatically detected, as is the level (see `chalk.level`). However, if you'd like to simply enable/disable Chalk, you can do so via the `.enabled` property. + +Chalk is enabled by default unless explicitly disabled via the constructor or `chalk.level` is `0`. + +If you need to change this in a reusable module, create a new instance: + +```js +const ctx = new chalk.constructor({enabled: false}); +``` + +### chalk.level + +Color support is automatically detected, but you can override it by setting the `level` property. You should however only do this in your own code as it applies globally to all Chalk consumers. + +If you need to change this in a reusable module, create a new instance: + +```js +const ctx = new chalk.constructor({level: 0}); +``` + +Levels are as follows: + +0. All colors disabled +1. Basic color support (16 colors) +2. 256 color support +3. Truecolor support (16 million colors) + +### chalk.supportsColor + +Detect whether the terminal [supports color](https://github.com/chalk/supports-color). Used internally and handled for you, but exposed for convenience. 
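For reference, a small sketch of how this detection result might be consulted; the messages are illustrative only, and `chalk.supportsColor` can also be `false` when no color support is detected:

```js
const chalk = require('chalk');

// `chalk.supportsColor` re-exports the result from the `supports-color` package:
// either `false`, or an object with `level`, `hasBasic`, `has256` and `has16m`.
if (chalk.supportsColor && chalk.supportsColor.has256) {
  console.log(chalk.hex('#FF8800')('256-color (or better) terminal detected.'));
} else {
  console.log('Limited or no color support detected.');
}
```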
+ +Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add the environment variable `FORCE_COLOR=1` to forcefully enable color or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. + + +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* +- `visible` (Text is emitted only if enabled) + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` *(On Windows the bright version is used since normal blue is illegible)* +- `magenta` +- `cyan` +- `white` +- `gray` ("bright black") +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + + +## Tagged template literal + +Chalk can be used as a [tagged template literal](http://exploringjs.com/es6/ch_template-literals.html#_tagged-template-literals). + +```js +const chalk = require('chalk'); + +const miles = 18; +const calculateFeet = miles => miles * 5280; + +console.log(chalk` + There are {bold 5280 feet} in a mile. + In {bold ${miles} miles}, there are {green.bold ${calculateFeet(miles)} feet}. +`); +``` + +Blocks are delimited by an opening curly brace (`{`), a style, some content, and a closing curly brace (`}`). + +Template styles are chained exactly like normal Chalk styles. The following two statements are equivalent: + +```js +console.log(chalk.bold.rgb(10, 100, 200)('Hello!')); +console.log(chalk`{bold.rgb(10,100,200) Hello!}`); +``` + +Note that function styles (`rgb()`, `hsl()`, `keyword()`, etc.) may not contain spaces between parameters. + +All interpolated values (`` chalk`${foo}` ``) are converted to strings via the `.toString()` method. All curly braces (`{` and `}`) in interpolated value strings are escaped. + + +## 256 and Truecolor color support + +Chalk supports 256 colors and [Truecolor](https://gist.github.com/XVilka/8346728) (16 million colors) on supported terminal apps. + +Colors are downsampled from 16 million RGB values to an ANSI color format that is supported by the terminal emulator (or by specifying `{level: n}` as a Chalk option). For example, Chalk configured to run at level 1 (basic color support) will downsample an RGB value of #FF0000 (red) to 31 (ANSI escape for red). + +Examples: + +- `chalk.hex('#DEADED').underline('Hello, world!')` +- `chalk.keyword('orange')('Some orange text')` +- `chalk.rgb(15, 100, 204).inverse('Hello!')` + +Background versions of these models are prefixed with `bg` and the first level of the module capitalized (e.g. `keyword` for foreground colors and `bgKeyword` for background colors). 
+ +- `chalk.bgHex('#DEADED').underline('Hello, world!')` +- `chalk.bgKeyword('orange')('Some orange text')` +- `chalk.bgRgb(15, 100, 204).inverse('Hello!')` + +The following color models can be used: + +- [`rgb`](https://en.wikipedia.org/wiki/RGB_color_model) - Example: `chalk.rgb(255, 136, 0).bold('Orange!')` +- [`hex`](https://en.wikipedia.org/wiki/Web_colors#Hex_triplet) - Example: `chalk.hex('#FF8800').bold('Orange!')` +- [`keyword`](https://www.w3.org/wiki/CSS/Properties/color/keywords) (CSS keywords) - Example: `chalk.keyword('orange').bold('Orange!')` +- [`hsl`](https://en.wikipedia.org/wiki/HSL_and_HSV) - Example: `chalk.hsl(32, 100, 50).bold('Orange!')` +- [`hsv`](https://en.wikipedia.org/wiki/HSL_and_HSV) - Example: `chalk.hsv(32, 100, 100).bold('Orange!')` +- [`hwb`](https://en.wikipedia.org/wiki/HWB_color_model) - Example: `chalk.hwb(32, 0, 50).bold('Orange!')` +- `ansi16` +- `ansi256` + + +## Windows + +If you're on Windows, do yourself a favor and use [`cmder`](http://cmder.net/) instead of `cmd.exe`. + + +## Origin story + +[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68) and the package is unmaintained. Although there are other packages, they either do too much or not enough. Chalk is a clean and focused alternative. + + +## Related + +- [chalk-cli](https://github.com/chalk/chalk-cli) - CLI for this module +- [ansi-styles](https://github.com/chalk/ansi-styles) - ANSI escape codes for styling strings in the terminal +- [supports-color](https://github.com/chalk/supports-color) - Detect whether a terminal supports color +- [strip-ansi](https://github.com/chalk/strip-ansi) - Strip ANSI escape codes +- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Strip ANSI escape codes from a stream +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [wrap-ansi](https://github.com/chalk/wrap-ansi) - Wordwrap a string with ANSI escape codes +- [slice-ansi](https://github.com/chalk/slice-ansi) - Slice a string with ANSI escape codes +- [color-convert](https://github.com/qix-/color-convert) - Converts colors between different models +- [chalk-animation](https://github.com/bokub/chalk-animation) - Animate strings in the terminal +- [gradient-string](https://github.com/bokub/gradient-string) - Apply color gradients to strings +- [chalk-pipe](https://github.com/LitoMore/chalk-pipe) - Create chalk style schemes with simpler style strings +- [terminal-link](https://github.com/sindresorhus/terminal-link) - Create clickable links in the terminal + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/chalk/templates.js b/node_modules/chalk/templates.js new file mode 100644 index 0000000..dbdf9b2 --- /dev/null +++ b/node_modules/chalk/templates.js @@ -0,0 +1,128 @@ +'use strict'; +const TEMPLATE_REGEX = /(?:\\(u[a-f\d]{4}|x[a-f\d]{2}|.))|(?:\{(~)?(\w+(?:\([^)]*\))?(?:\.\w+(?:\([^)]*\))?)*)(?:[ \t]|(?=\r?\n)))|(\})|((?:.|[\r\n\f])+?)/gi; +const STYLE_REGEX = /(?:^|\.)(\w+)(?:\(([^)]*)\))?/g; +const STRING_REGEX = /^(['"])((?:\\.|(?!\1)[^\\])*)\1$/; +const ESCAPE_REGEX = /\\(u[a-f\d]{4}|x[a-f\d]{2}|.)|([^\\])/gi; + +const ESCAPES = new Map([ + 
['n', '\n'], + ['r', '\r'], + ['t', '\t'], + ['b', '\b'], + ['f', '\f'], + ['v', '\v'], + ['0', '\0'], + ['\\', '\\'], + ['e', '\u001B'], + ['a', '\u0007'] +]); + +function unescape(c) { + if ((c[0] === 'u' && c.length === 5) || (c[0] === 'x' && c.length === 3)) { + return String.fromCharCode(parseInt(c.slice(1), 16)); + } + + return ESCAPES.get(c) || c; +} + +function parseArguments(name, args) { + const results = []; + const chunks = args.trim().split(/\s*,\s*/g); + let matches; + + for (const chunk of chunks) { + if (!isNaN(chunk)) { + results.push(Number(chunk)); + } else if ((matches = chunk.match(STRING_REGEX))) { + results.push(matches[2].replace(ESCAPE_REGEX, (m, escape, chr) => escape ? unescape(escape) : chr)); + } else { + throw new Error(`Invalid Chalk template style argument: ${chunk} (in style '${name}')`); + } + } + + return results; +} + +function parseStyle(style) { + STYLE_REGEX.lastIndex = 0; + + const results = []; + let matches; + + while ((matches = STYLE_REGEX.exec(style)) !== null) { + const name = matches[1]; + + if (matches[2]) { + const args = parseArguments(name, matches[2]); + results.push([name].concat(args)); + } else { + results.push([name]); + } + } + + return results; +} + +function buildStyle(chalk, styles) { + const enabled = {}; + + for (const layer of styles) { + for (const style of layer.styles) { + enabled[style[0]] = layer.inverse ? null : style.slice(1); + } + } + + let current = chalk; + for (const styleName of Object.keys(enabled)) { + if (Array.isArray(enabled[styleName])) { + if (!(styleName in current)) { + throw new Error(`Unknown Chalk style: ${styleName}`); + } + + if (enabled[styleName].length > 0) { + current = current[styleName].apply(current, enabled[styleName]); + } else { + current = current[styleName]; + } + } + } + + return current; +} + +module.exports = (chalk, tmp) => { + const styles = []; + const chunks = []; + let chunk = []; + + // eslint-disable-next-line max-params + tmp.replace(TEMPLATE_REGEX, (m, escapeChar, inverse, style, close, chr) => { + if (escapeChar) { + chunk.push(unescape(escapeChar)); + } else if (style) { + const str = chunk.join(''); + chunk = []; + chunks.push(styles.length === 0 ? str : buildStyle(chalk, styles)(str)); + styles.push({inverse, styles: parseStyle(style)}); + } else if (close) { + if (styles.length === 0) { + throw new Error('Found extraneous } in Chalk template literal'); + } + + chunks.push(buildStyle(chalk, styles)(chunk.join(''))); + chunk = []; + styles.pop(); + } else { + chunk.push(chr); + } + }); + + chunks.push(chunk.join('')); + + if (styles.length > 0) { + const errMsg = `Chalk template literal is missing ${styles.length} closing bracket${styles.length === 1 ? 
'' : 's'} (\`}\`)`; + throw new Error(errMsg); + } + + return chunks.join(''); +}; diff --git a/node_modules/chalk/types/index.d.ts b/node_modules/chalk/types/index.d.ts new file mode 100644 index 0000000..b4e4dc5 --- /dev/null +++ b/node_modules/chalk/types/index.d.ts @@ -0,0 +1,97 @@ +// Type definitions for Chalk +// Definitions by: Thomas Sauer <https://github.com/t-sauer> + +export const enum Level { + None = 0, + Basic = 1, + Ansi256 = 2, + TrueColor = 3 +} + +export interface ChalkOptions { + enabled?: boolean; + level?: Level; +} + +export interface ChalkConstructor { + new (options?: ChalkOptions): Chalk; + (options?: ChalkOptions): Chalk; +} + +export interface ColorSupport { + level: Level; + hasBasic: boolean; + has256: boolean; + has16m: boolean; +} + +export interface Chalk { + (...text: string[]): string; + (text: TemplateStringsArray, ...placeholders: string[]): string; + constructor: ChalkConstructor; + enabled: boolean; + level: Level; + rgb(r: number, g: number, b: number): this; + hsl(h: number, s: number, l: number): this; + hsv(h: number, s: number, v: number): this; + hwb(h: number, w: number, b: number): this; + bgHex(color: string): this; + bgKeyword(color: string): this; + bgRgb(r: number, g: number, b: number): this; + bgHsl(h: number, s: number, l: number): this; + bgHsv(h: number, s: number, v: number): this; + bgHwb(h: number, w: number, b: number): this; + hex(color: string): this; + keyword(color: string): this; + + readonly reset: this; + readonly bold: this; + readonly dim: this; + readonly italic: this; + readonly underline: this; + readonly inverse: this; + readonly hidden: this; + readonly strikethrough: this; + + readonly visible: this; + + readonly black: this; + readonly red: this; + readonly green: this; + readonly yellow: this; + readonly blue: this; + readonly magenta: this; + readonly cyan: this; + readonly white: this; + readonly gray: this; + readonly grey: this; + readonly blackBright: this; + readonly redBright: this; + readonly greenBright: this; + readonly yellowBright: this; + readonly blueBright: this; + readonly magentaBright: this; + readonly cyanBright: this; + readonly whiteBright: this; + + readonly bgBlack: this; + readonly bgRed: this; + readonly bgGreen: this; + readonly bgYellow: this; + readonly bgBlue: this; + readonly bgMagenta: this; + readonly bgCyan: this; + readonly bgWhite: this; + readonly bgBlackBright: this; + readonly bgRedBright: this; + readonly bgGreenBright: this; + readonly bgYellowBright: this; + readonly bgBlueBright: this; + readonly bgMagentaBright: this; + readonly bgCyanBright: this; + readonly bgWhiteBright: this; +} + +declare const chalk: Chalk & { supportsColor: ColorSupport }; + +export default chalk diff --git a/node_modules/color-convert/CHANGELOG.md b/node_modules/color-convert/CHANGELOG.md new file mode 100644 index 0000000..0a7bce4 --- /dev/null +++ b/node_modules/color-convert/CHANGELOG.md @@ -0,0 +1,54 @@ +# 1.0.0 - 2016-01-07 + +- Removed: unused speed test +- Added: Automatic routing between previously unsupported conversions +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Removed: `xxx2xxx()` and `xxx2xxxRaw()` functions +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Removed: `convert()` class +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Changed: all functions to lookup dictionary +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Changed: `ansi` to `ansi256` +([#27](https://github.com/Qix-/color-convert/pull/27)) +- Fixed: argument grouping for 
functions requiring only one argument +([#27](https://github.com/Qix-/color-convert/pull/27)) + +# 0.6.0 - 2015-07-23 + +- Added: methods to handle +[ANSI](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors) 16/256 colors: + - rgb2ansi16 + - rgb2ansi + - hsl2ansi16 + - hsl2ansi + - hsv2ansi16 + - hsv2ansi + - hwb2ansi16 + - hwb2ansi + - cmyk2ansi16 + - cmyk2ansi + - keyword2ansi16 + - keyword2ansi + - ansi162rgb + - ansi162hsl + - ansi162hsv + - ansi162hwb + - ansi162cmyk + - ansi162keyword + - ansi2rgb + - ansi2hsl + - ansi2hsv + - ansi2hwb + - ansi2cmyk + - ansi2keyword +([#18](https://github.com/harthur/color-convert/pull/18)) + +# 0.5.3 - 2015-06-02 + +- Fixed: hsl2hsv does not return `NaN` anymore when using `[0,0,0]` +([#15](https://github.com/harthur/color-convert/issues/15)) + +--- + +Check out commit logs for older releases diff --git a/node_modules/color-convert/LICENSE b/node_modules/color-convert/LICENSE new file mode 100644 index 0000000..5b4c386 --- /dev/null +++ b/node_modules/color-convert/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011-2016 Heather Arthur <fayearthur@gmail.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/color-convert/README.md b/node_modules/color-convert/README.md new file mode 100644 index 0000000..d4b08fc --- /dev/null +++ b/node_modules/color-convert/README.md @@ -0,0 +1,68 @@ +# color-convert + +[![Build Status](https://travis-ci.org/Qix-/color-convert.svg?branch=master)](https://travis-ci.org/Qix-/color-convert) + +Color-convert is a color conversion library for JavaScript and node. +It converts all ways between `rgb`, `hsl`, `hsv`, `hwb`, `cmyk`, `ansi`, `ansi16`, `hex` strings, and CSS `keyword`s (will round to closest): + +```js +var convert = require('color-convert'); + +convert.rgb.hsl(140, 200, 100); // [96, 48, 59] +convert.keyword.rgb('blue'); // [0, 0, 255] + +var rgbChannels = convert.rgb.channels; // 3 +var cmykChannels = convert.cmyk.channels; // 4 +var ansiChannels = convert.ansi16.channels; // 1 +``` + +# Install + +```console +$ npm install color-convert +``` + +# API + +Simply get the property of the _from_ and _to_ conversion that you're looking for. + +All functions have a rounded and unrounded variant. By default, return values are rounded. To get the unrounded (raw) results, simply tack on `.raw` to the function. + +All 'from' functions have a hidden property called `.channels` that indicates the number of channels the function expects (not including alpha). 
+ +```js +var convert = require('color-convert'); + +// Hex to LAB +convert.hex.lab('DEADBF'); // [ 76, 21, -2 ] +convert.hex.lab.raw('DEADBF'); // [ 75.56213190997677, 20.653827952644754, -2.290532499330533 ] + +// RGB to CMYK +convert.rgb.cmyk(167, 255, 4); // [ 35, 0, 98, 0 ] +convert.rgb.cmyk.raw(167, 255, 4); // [ 34.509803921568626, 0, 98.43137254901961, 0 ] +``` + +### Arrays +All functions that accept multiple arguments also support passing an array. + +Note that this does **not** apply to functions that convert from a color that only requires one value (e.g. `keyword`, `ansi256`, `hex`, etc.) + +```js +var convert = require('color-convert'); + +convert.rgb.hex(123, 45, 67); // '7B2D43' +convert.rgb.hex([123, 45, 67]); // '7B2D43' +``` + +## Routing + +Conversions that don't have an _explicitly_ defined conversion (in [conversions.js](conversions.js)), but can be converted by means of sub-conversions (e.g. XYZ -> **RGB** -> CMYK), are automatically routed together. This allows just about any color model supported by `color-convert` to be converted to any other model, so long as a sub-conversion path exists. This is also true for conversions requiring more than one step in between (e.g. LCH -> **LAB** -> **XYZ** -> **RGB** -> Hex). + +Keep in mind that extensive conversions _may_ result in a loss of precision, and exist only to be complete. For a list of "direct" (single-step) conversions, see [conversions.js](conversions.js). + +# Contribute + +If there is a new model you would like to support, or want to add a direct conversion between two existing models, please send us a pull request. + +# License +Copyright © 2011-2016, Heather Arthur and Josh Junon. Licensed under the [MIT License](LICENSE). diff --git a/node_modules/color-convert/conversions.js b/node_modules/color-convert/conversions.js new file mode 100644 index 0000000..3217200 --- /dev/null +++ b/node_modules/color-convert/conversions.js @@ -0,0 +1,868 @@ +/* MIT license */ +var cssKeywords = require('color-name'); + +// NOTE: conversions should only return primitive values (i.e. arrays, or +// values that give correct `typeof` results). +// do not use box values types (i.e. Number(), String(), etc.) 
+ +var reverseKeywords = {}; +for (var key in cssKeywords) { + if (cssKeywords.hasOwnProperty(key)) { + reverseKeywords[cssKeywords[key]] = key; + } +} + +var convert = module.exports = { + rgb: {channels: 3, labels: 'rgb'}, + hsl: {channels: 3, labels: 'hsl'}, + hsv: {channels: 3, labels: 'hsv'}, + hwb: {channels: 3, labels: 'hwb'}, + cmyk: {channels: 4, labels: 'cmyk'}, + xyz: {channels: 3, labels: 'xyz'}, + lab: {channels: 3, labels: 'lab'}, + lch: {channels: 3, labels: 'lch'}, + hex: {channels: 1, labels: ['hex']}, + keyword: {channels: 1, labels: ['keyword']}, + ansi16: {channels: 1, labels: ['ansi16']}, + ansi256: {channels: 1, labels: ['ansi256']}, + hcg: {channels: 3, labels: ['h', 'c', 'g']}, + apple: {channels: 3, labels: ['r16', 'g16', 'b16']}, + gray: {channels: 1, labels: ['gray']} +}; + +// hide .channels and .labels properties +for (var model in convert) { + if (convert.hasOwnProperty(model)) { + if (!('channels' in convert[model])) { + throw new Error('missing channels property: ' + model); + } + + if (!('labels' in convert[model])) { + throw new Error('missing channel labels property: ' + model); + } + + if (convert[model].labels.length !== convert[model].channels) { + throw new Error('channel and label counts mismatch: ' + model); + } + + var channels = convert[model].channels; + var labels = convert[model].labels; + delete convert[model].channels; + delete convert[model].labels; + Object.defineProperty(convert[model], 'channels', {value: channels}); + Object.defineProperty(convert[model], 'labels', {value: labels}); + } +} + +convert.rgb.hsl = function (rgb) { + var r = rgb[0] / 255; + var g = rgb[1] / 255; + var b = rgb[2] / 255; + var min = Math.min(r, g, b); + var max = Math.max(r, g, b); + var delta = max - min; + var h; + var s; + var l; + + if (max === min) { + h = 0; + } else if (r === max) { + h = (g - b) / delta; + } else if (g === max) { + h = 2 + (b - r) / delta; + } else if (b === max) { + h = 4 + (r - g) / delta; + } + + h = Math.min(h * 60, 360); + + if (h < 0) { + h += 360; + } + + l = (min + max) / 2; + + if (max === min) { + s = 0; + } else if (l <= 0.5) { + s = delta / (max + min); + } else { + s = delta / (2 - max - min); + } + + return [h, s * 100, l * 100]; +}; + +convert.rgb.hsv = function (rgb) { + var rdif; + var gdif; + var bdif; + var h; + var s; + + var r = rgb[0] / 255; + var g = rgb[1] / 255; + var b = rgb[2] / 255; + var v = Math.max(r, g, b); + var diff = v - Math.min(r, g, b); + var diffc = function (c) { + return (v - c) / 6 / diff + 1 / 2; + }; + + if (diff === 0) { + h = s = 0; + } else { + s = diff / v; + rdif = diffc(r); + gdif = diffc(g); + bdif = diffc(b); + + if (r === v) { + h = bdif - gdif; + } else if (g === v) { + h = (1 / 3) + rdif - bdif; + } else if (b === v) { + h = (2 / 3) + gdif - rdif; + } + if (h < 0) { + h += 1; + } else if (h > 1) { + h -= 1; + } + } + + return [ + h * 360, + s * 100, + v * 100 + ]; +}; + +convert.rgb.hwb = function (rgb) { + var r = rgb[0]; + var g = rgb[1]; + var b = rgb[2]; + var h = convert.rgb.hsl(rgb)[0]; + var w = 1 / 255 * Math.min(r, Math.min(g, b)); + + b = 1 - 1 / 255 * Math.max(r, Math.max(g, b)); + + return [h, w * 100, b * 100]; +}; + +convert.rgb.cmyk = function (rgb) { + var r = rgb[0] / 255; + var g = rgb[1] / 255; + var b = rgb[2] / 255; + var c; + var m; + var y; + var k; + + k = Math.min(1 - r, 1 - g, 1 - b); + c = (1 - r - k) / (1 - k) || 0; + m = (1 - g - k) / (1 - k) || 0; + y = (1 - b - k) / (1 - k) || 0; + + return [c * 100, m * 100, y * 100, k * 100]; +}; + +/** + * See 
https://en.m.wikipedia.org/wiki/Euclidean_distance#Squared_Euclidean_distance + * */ +function comparativeDistance(x, y) { + return ( + Math.pow(x[0] - y[0], 2) + + Math.pow(x[1] - y[1], 2) + + Math.pow(x[2] - y[2], 2) + ); +} + +convert.rgb.keyword = function (rgb) { + var reversed = reverseKeywords[rgb]; + if (reversed) { + return reversed; + } + + var currentClosestDistance = Infinity; + var currentClosestKeyword; + + for (var keyword in cssKeywords) { + if (cssKeywords.hasOwnProperty(keyword)) { + var value = cssKeywords[keyword]; + + // Compute comparative distance + var distance = comparativeDistance(rgb, value); + + // Check if its less, if so set as closest + if (distance < currentClosestDistance) { + currentClosestDistance = distance; + currentClosestKeyword = keyword; + } + } + } + + return currentClosestKeyword; +}; + +convert.keyword.rgb = function (keyword) { + return cssKeywords[keyword]; +}; + +convert.rgb.xyz = function (rgb) { + var r = rgb[0] / 255; + var g = rgb[1] / 255; + var b = rgb[2] / 255; + + // assume sRGB + r = r > 0.04045 ? Math.pow(((r + 0.055) / 1.055), 2.4) : (r / 12.92); + g = g > 0.04045 ? Math.pow(((g + 0.055) / 1.055), 2.4) : (g / 12.92); + b = b > 0.04045 ? Math.pow(((b + 0.055) / 1.055), 2.4) : (b / 12.92); + + var x = (r * 0.4124) + (g * 0.3576) + (b * 0.1805); + var y = (r * 0.2126) + (g * 0.7152) + (b * 0.0722); + var z = (r * 0.0193) + (g * 0.1192) + (b * 0.9505); + + return [x * 100, y * 100, z * 100]; +}; + +convert.rgb.lab = function (rgb) { + var xyz = convert.rgb.xyz(rgb); + var x = xyz[0]; + var y = xyz[1]; + var z = xyz[2]; + var l; + var a; + var b; + + x /= 95.047; + y /= 100; + z /= 108.883; + + x = x > 0.008856 ? Math.pow(x, 1 / 3) : (7.787 * x) + (16 / 116); + y = y > 0.008856 ? Math.pow(y, 1 / 3) : (7.787 * y) + (16 / 116); + z = z > 0.008856 ? Math.pow(z, 1 / 3) : (7.787 * z) + (16 / 116); + + l = (116 * y) - 16; + a = 500 * (x - y); + b = 200 * (y - z); + + return [l, a, b]; +}; + +convert.hsl.rgb = function (hsl) { + var h = hsl[0] / 360; + var s = hsl[1] / 100; + var l = hsl[2] / 100; + var t1; + var t2; + var t3; + var rgb; + var val; + + if (s === 0) { + val = l * 255; + return [val, val, val]; + } + + if (l < 0.5) { + t2 = l * (1 + s); + } else { + t2 = l + s - l * s; + } + + t1 = 2 * l - t2; + + rgb = [0, 0, 0]; + for (var i = 0; i < 3; i++) { + t3 = h + 1 / 3 * -(i - 1); + if (t3 < 0) { + t3++; + } + if (t3 > 1) { + t3--; + } + + if (6 * t3 < 1) { + val = t1 + (t2 - t1) * 6 * t3; + } else if (2 * t3 < 1) { + val = t2; + } else if (3 * t3 < 2) { + val = t1 + (t2 - t1) * (2 / 3 - t3) * 6; + } else { + val = t1; + } + + rgb[i] = val * 255; + } + + return rgb; +}; + +convert.hsl.hsv = function (hsl) { + var h = hsl[0]; + var s = hsl[1] / 100; + var l = hsl[2] / 100; + var smin = s; + var lmin = Math.max(l, 0.01); + var sv; + var v; + + l *= 2; + s *= (l <= 1) ? l : 2 - l; + smin *= lmin <= 1 ? lmin : 2 - lmin; + v = (l + s) / 2; + sv = l === 0 ? 
(2 * smin) / (lmin + smin) : (2 * s) / (l + s); + + return [h, sv * 100, v * 100]; +}; + +convert.hsv.rgb = function (hsv) { + var h = hsv[0] / 60; + var s = hsv[1] / 100; + var v = hsv[2] / 100; + var hi = Math.floor(h) % 6; + + var f = h - Math.floor(h); + var p = 255 * v * (1 - s); + var q = 255 * v * (1 - (s * f)); + var t = 255 * v * (1 - (s * (1 - f))); + v *= 255; + + switch (hi) { + case 0: + return [v, t, p]; + case 1: + return [q, v, p]; + case 2: + return [p, v, t]; + case 3: + return [p, q, v]; + case 4: + return [t, p, v]; + case 5: + return [v, p, q]; + } +}; + +convert.hsv.hsl = function (hsv) { + var h = hsv[0]; + var s = hsv[1] / 100; + var v = hsv[2] / 100; + var vmin = Math.max(v, 0.01); + var lmin; + var sl; + var l; + + l = (2 - s) * v; + lmin = (2 - s) * vmin; + sl = s * vmin; + sl /= (lmin <= 1) ? lmin : 2 - lmin; + sl = sl || 0; + l /= 2; + + return [h, sl * 100, l * 100]; +}; + +// http://dev.w3.org/csswg/css-color/#hwb-to-rgb +convert.hwb.rgb = function (hwb) { + var h = hwb[0] / 360; + var wh = hwb[1] / 100; + var bl = hwb[2] / 100; + var ratio = wh + bl; + var i; + var v; + var f; + var n; + + // wh + bl cant be > 1 + if (ratio > 1) { + wh /= ratio; + bl /= ratio; + } + + i = Math.floor(6 * h); + v = 1 - bl; + f = 6 * h - i; + + if ((i & 0x01) !== 0) { + f = 1 - f; + } + + n = wh + f * (v - wh); // linear interpolation + + var r; + var g; + var b; + switch (i) { + default: + case 6: + case 0: r = v; g = n; b = wh; break; + case 1: r = n; g = v; b = wh; break; + case 2: r = wh; g = v; b = n; break; + case 3: r = wh; g = n; b = v; break; + case 4: r = n; g = wh; b = v; break; + case 5: r = v; g = wh; b = n; break; + } + + return [r * 255, g * 255, b * 255]; +}; + +convert.cmyk.rgb = function (cmyk) { + var c = cmyk[0] / 100; + var m = cmyk[1] / 100; + var y = cmyk[2] / 100; + var k = cmyk[3] / 100; + var r; + var g; + var b; + + r = 1 - Math.min(1, c * (1 - k) + k); + g = 1 - Math.min(1, m * (1 - k) + k); + b = 1 - Math.min(1, y * (1 - k) + k); + + return [r * 255, g * 255, b * 255]; +}; + +convert.xyz.rgb = function (xyz) { + var x = xyz[0] / 100; + var y = xyz[1] / 100; + var z = xyz[2] / 100; + var r; + var g; + var b; + + r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986); + g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415); + b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570); + + // assume sRGB + r = r > 0.0031308 + ? ((1.055 * Math.pow(r, 1.0 / 2.4)) - 0.055) + : r * 12.92; + + g = g > 0.0031308 + ? ((1.055 * Math.pow(g, 1.0 / 2.4)) - 0.055) + : g * 12.92; + + b = b > 0.0031308 + ? ((1.055 * Math.pow(b, 1.0 / 2.4)) - 0.055) + : b * 12.92; + + r = Math.min(Math.max(0, r), 1); + g = Math.min(Math.max(0, g), 1); + b = Math.min(Math.max(0, b), 1); + + return [r * 255, g * 255, b * 255]; +}; + +convert.xyz.lab = function (xyz) { + var x = xyz[0]; + var y = xyz[1]; + var z = xyz[2]; + var l; + var a; + var b; + + x /= 95.047; + y /= 100; + z /= 108.883; + + x = x > 0.008856 ? Math.pow(x, 1 / 3) : (7.787 * x) + (16 / 116); + y = y > 0.008856 ? Math.pow(y, 1 / 3) : (7.787 * y) + (16 / 116); + z = z > 0.008856 ? Math.pow(z, 1 / 3) : (7.787 * z) + (16 / 116); + + l = (116 * y) - 16; + a = 500 * (x - y); + b = 200 * (y - z); + + return [l, a, b]; +}; + +convert.lab.xyz = function (lab) { + var l = lab[0]; + var a = lab[1]; + var b = lab[2]; + var x; + var y; + var z; + + y = (l + 16) / 116; + x = a / 500 + y; + z = y - b / 200; + + var y2 = Math.pow(y, 3); + var x2 = Math.pow(x, 3); + var z2 = Math.pow(z, 3); + y = y2 > 0.008856 ? 
y2 : (y - 16 / 116) / 7.787; + x = x2 > 0.008856 ? x2 : (x - 16 / 116) / 7.787; + z = z2 > 0.008856 ? z2 : (z - 16 / 116) / 7.787; + + x *= 95.047; + y *= 100; + z *= 108.883; + + return [x, y, z]; +}; + +convert.lab.lch = function (lab) { + var l = lab[0]; + var a = lab[1]; + var b = lab[2]; + var hr; + var h; + var c; + + hr = Math.atan2(b, a); + h = hr * 360 / 2 / Math.PI; + + if (h < 0) { + h += 360; + } + + c = Math.sqrt(a * a + b * b); + + return [l, c, h]; +}; + +convert.lch.lab = function (lch) { + var l = lch[0]; + var c = lch[1]; + var h = lch[2]; + var a; + var b; + var hr; + + hr = h / 360 * 2 * Math.PI; + a = c * Math.cos(hr); + b = c * Math.sin(hr); + + return [l, a, b]; +}; + +convert.rgb.ansi16 = function (args) { + var r = args[0]; + var g = args[1]; + var b = args[2]; + var value = 1 in arguments ? arguments[1] : convert.rgb.hsv(args)[2]; // hsv -> ansi16 optimization + + value = Math.round(value / 50); + + if (value === 0) { + return 30; + } + + var ansi = 30 + + ((Math.round(b / 255) << 2) + | (Math.round(g / 255) << 1) + | Math.round(r / 255)); + + if (value === 2) { + ansi += 60; + } + + return ansi; +}; + +convert.hsv.ansi16 = function (args) { + // optimization here; we already know the value and don't need to get + // it converted for us. + return convert.rgb.ansi16(convert.hsv.rgb(args), args[2]); +}; + +convert.rgb.ansi256 = function (args) { + var r = args[0]; + var g = args[1]; + var b = args[2]; + + // we use the extended greyscale palette here, with the exception of + // black and white. normal palette only has 4 greyscale shades. + if (r === g && g === b) { + if (r < 8) { + return 16; + } + + if (r > 248) { + return 231; + } + + return Math.round(((r - 8) / 247) * 24) + 232; + } + + var ansi = 16 + + (36 * Math.round(r / 255 * 5)) + + (6 * Math.round(g / 255 * 5)) + + Math.round(b / 255 * 5); + + return ansi; +}; + +convert.ansi16.rgb = function (args) { + var color = args % 10; + + // handle greyscale + if (color === 0 || color === 7) { + if (args > 50) { + color += 3.5; + } + + color = color / 10.5 * 255; + + return [color, color, color]; + } + + var mult = (~~(args > 50) + 1) * 0.5; + var r = ((color & 1) * mult) * 255; + var g = (((color >> 1) & 1) * mult) * 255; + var b = (((color >> 2) & 1) * mult) * 255; + + return [r, g, b]; +}; + +convert.ansi256.rgb = function (args) { + // handle greyscale + if (args >= 232) { + var c = (args - 232) * 10 + 8; + return [c, c, c]; + } + + args -= 16; + + var rem; + var r = Math.floor(args / 36) / 5 * 255; + var g = Math.floor((rem = args % 36) / 6) / 5 * 255; + var b = (rem % 6) / 5 * 255; + + return [r, g, b]; +}; + +convert.rgb.hex = function (args) { + var integer = ((Math.round(args[0]) & 0xFF) << 16) + + ((Math.round(args[1]) & 0xFF) << 8) + + (Math.round(args[2]) & 0xFF); + + var string = integer.toString(16).toUpperCase(); + return '000000'.substring(string.length) + string; +}; + +convert.hex.rgb = function (args) { + var match = args.toString(16).match(/[a-f0-9]{6}|[a-f0-9]{3}/i); + if (!match) { + return [0, 0, 0]; + } + + var colorString = match[0]; + + if (match[0].length === 3) { + colorString = colorString.split('').map(function (char) { + return char + char; + }).join(''); + } + + var integer = parseInt(colorString, 16); + var r = (integer >> 16) & 0xFF; + var g = (integer >> 8) & 0xFF; + var b = integer & 0xFF; + + return [r, g, b]; +}; + +convert.rgb.hcg = function (rgb) { + var r = rgb[0] / 255; + var g = rgb[1] / 255; + var b = rgb[2] / 255; + var max = Math.max(Math.max(r, g), b); + var min = 
Math.min(Math.min(r, g), b); + var chroma = (max - min); + var grayscale; + var hue; + + if (chroma < 1) { + grayscale = min / (1 - chroma); + } else { + grayscale = 0; + } + + if (chroma <= 0) { + hue = 0; + } else + if (max === r) { + hue = ((g - b) / chroma) % 6; + } else + if (max === g) { + hue = 2 + (b - r) / chroma; + } else { + hue = 4 + (r - g) / chroma + 4; + } + + hue /= 6; + hue %= 1; + + return [hue * 360, chroma * 100, grayscale * 100]; +}; + +convert.hsl.hcg = function (hsl) { + var s = hsl[1] / 100; + var l = hsl[2] / 100; + var c = 1; + var f = 0; + + if (l < 0.5) { + c = 2.0 * s * l; + } else { + c = 2.0 * s * (1.0 - l); + } + + if (c < 1.0) { + f = (l - 0.5 * c) / (1.0 - c); + } + + return [hsl[0], c * 100, f * 100]; +}; + +convert.hsv.hcg = function (hsv) { + var s = hsv[1] / 100; + var v = hsv[2] / 100; + + var c = s * v; + var f = 0; + + if (c < 1.0) { + f = (v - c) / (1 - c); + } + + return [hsv[0], c * 100, f * 100]; +}; + +convert.hcg.rgb = function (hcg) { + var h = hcg[0] / 360; + var c = hcg[1] / 100; + var g = hcg[2] / 100; + + if (c === 0.0) { + return [g * 255, g * 255, g * 255]; + } + + var pure = [0, 0, 0]; + var hi = (h % 1) * 6; + var v = hi % 1; + var w = 1 - v; + var mg = 0; + + switch (Math.floor(hi)) { + case 0: + pure[0] = 1; pure[1] = v; pure[2] = 0; break; + case 1: + pure[0] = w; pure[1] = 1; pure[2] = 0; break; + case 2: + pure[0] = 0; pure[1] = 1; pure[2] = v; break; + case 3: + pure[0] = 0; pure[1] = w; pure[2] = 1; break; + case 4: + pure[0] = v; pure[1] = 0; pure[2] = 1; break; + default: + pure[0] = 1; pure[1] = 0; pure[2] = w; + } + + mg = (1.0 - c) * g; + + return [ + (c * pure[0] + mg) * 255, + (c * pure[1] + mg) * 255, + (c * pure[2] + mg) * 255 + ]; +}; + +convert.hcg.hsv = function (hcg) { + var c = hcg[1] / 100; + var g = hcg[2] / 100; + + var v = c + g * (1.0 - c); + var f = 0; + + if (v > 0.0) { + f = c / v; + } + + return [hcg[0], f * 100, v * 100]; +}; + +convert.hcg.hsl = function (hcg) { + var c = hcg[1] / 100; + var g = hcg[2] / 100; + + var l = g * (1.0 - c) + 0.5 * c; + var s = 0; + + if (l > 0.0 && l < 0.5) { + s = c / (2 * l); + } else + if (l >= 0.5 && l < 1.0) { + s = c / (2 * (1 - l)); + } + + return [hcg[0], s * 100, l * 100]; +}; + +convert.hcg.hwb = function (hcg) { + var c = hcg[1] / 100; + var g = hcg[2] / 100; + var v = c + g * (1.0 - c); + return [hcg[0], (v - c) * 100, (1 - v) * 100]; +}; + +convert.hwb.hcg = function (hwb) { + var w = hwb[1] / 100; + var b = hwb[2] / 100; + var v = 1 - b; + var c = v - w; + var g = 0; + + if (c < 1) { + g = (v - c) / (1 - c); + } + + return [hwb[0], c * 100, g * 100]; +}; + +convert.apple.rgb = function (apple) { + return [(apple[0] / 65535) * 255, (apple[1] / 65535) * 255, (apple[2] / 65535) * 255]; +}; + +convert.rgb.apple = function (rgb) { + return [(rgb[0] / 255) * 65535, (rgb[1] / 255) * 65535, (rgb[2] / 255) * 65535]; +}; + +convert.gray.rgb = function (args) { + return [args[0] / 100 * 255, args[0] / 100 * 255, args[0] / 100 * 255]; +}; + +convert.gray.hsl = convert.gray.hsv = function (args) { + return [0, 0, args[0]]; +}; + +convert.gray.hwb = function (gray) { + return [0, 100, gray[0]]; +}; + +convert.gray.cmyk = function (gray) { + return [0, 0, 0, gray[0]]; +}; + +convert.gray.lab = function (gray) { + return [gray[0], 0, 0]; +}; + +convert.gray.hex = function (gray) { + var val = Math.round(gray[0] / 100 * 255) & 0xFF; + var integer = (val << 16) + (val << 8) + val; + + var string = integer.toString(16).toUpperCase(); + return '000000'.substring(string.length) + 
string; +}; + +convert.rgb.gray = function (rgb) { + var val = (rgb[0] + rgb[1] + rgb[2]) / 3; + return [val / 255 * 100]; +}; diff --git a/node_modules/color-convert/index.js b/node_modules/color-convert/index.js new file mode 100644 index 0000000..e65b5d7 --- /dev/null +++ b/node_modules/color-convert/index.js @@ -0,0 +1,78 @@ +var conversions = require('./conversions'); +var route = require('./route'); + +var convert = {}; + +var models = Object.keys(conversions); + +function wrapRaw(fn) { + var wrappedFn = function (args) { + if (args === undefined || args === null) { + return args; + } + + if (arguments.length > 1) { + args = Array.prototype.slice.call(arguments); + } + + return fn(args); + }; + + // preserve .conversion property if there is one + if ('conversion' in fn) { + wrappedFn.conversion = fn.conversion; + } + + return wrappedFn; +} + +function wrapRounded(fn) { + var wrappedFn = function (args) { + if (args === undefined || args === null) { + return args; + } + + if (arguments.length > 1) { + args = Array.prototype.slice.call(arguments); + } + + var result = fn(args); + + // we're assuming the result is an array here. + // see notice in conversions.js; don't use box types + // in conversion functions. + if (typeof result === 'object') { + for (var len = result.length, i = 0; i < len; i++) { + result[i] = Math.round(result[i]); + } + } + + return result; + }; + + // preserve .conversion property if there is one + if ('conversion' in fn) { + wrappedFn.conversion = fn.conversion; + } + + return wrappedFn; +} + +models.forEach(function (fromModel) { + convert[fromModel] = {}; + + Object.defineProperty(convert[fromModel], 'channels', {value: conversions[fromModel].channels}); + Object.defineProperty(convert[fromModel], 'labels', {value: conversions[fromModel].labels}); + + var routes = route(fromModel); + var routeModels = Object.keys(routes); + + routeModels.forEach(function (toModel) { + var fn = routes[toModel]; + + convert[fromModel][toModel] = wrapRounded(fn); + convert[fromModel][toModel].raw = wrapRaw(fn); + }); +}); + +module.exports = convert; diff --git a/node_modules/color-convert/package.json b/node_modules/color-convert/package.json new file mode 100644 index 0000000..dfbc471 --- /dev/null +++ b/node_modules/color-convert/package.json @@ -0,0 +1,46 @@ +{ + "name": "color-convert", + "description": "Plain color conversion functions", + "version": "1.9.3", + "author": "Heather Arthur <fayearthur@gmail.com>", + "license": "MIT", + "repository": "Qix-/color-convert", + "scripts": { + "pretest": "xo", + "test": "node test/basic.js" + }, + "keywords": [ + "color", + "colour", + "convert", + "converter", + "conversion", + "rgb", + "hsl", + "hsv", + "hwb", + "cmyk", + "ansi", + "ansi16" + ], + "files": [ + "index.js", + "conversions.js", + "css-keywords.js", + "route.js" + ], + "xo": { + "rules": { + "default-case": 0, + "no-inline-comments": 0, + "operator-linebreak": 0 + } + }, + "devDependencies": { + "chalk": "1.1.1", + "xo": "0.11.2" + }, + "dependencies": { + "color-name": "1.1.3" + } +} diff --git a/node_modules/color-convert/route.js b/node_modules/color-convert/route.js new file mode 100644 index 0000000..0a1fdea --- /dev/null +++ b/node_modules/color-convert/route.js @@ -0,0 +1,97 @@ +var conversions = require('./conversions'); + +/* + this function routes a model to all other models. + + all functions that are routed have a property `.conversion` attached + to the returned synthetic function. 
This property is an array + of strings, each with the steps in between the 'from' and 'to' + color models (inclusive). + + conversions that are not possible simply are not included. +*/ + +function buildGraph() { + var graph = {}; + // https://jsperf.com/object-keys-vs-for-in-with-closure/3 + var models = Object.keys(conversions); + + for (var len = models.length, i = 0; i < len; i++) { + graph[models[i]] = { + // http://jsperf.com/1-vs-infinity + // micro-opt, but this is simple. + distance: -1, + parent: null + }; + } + + return graph; +} + +// https://en.wikipedia.org/wiki/Breadth-first_search +function deriveBFS(fromModel) { + var graph = buildGraph(); + var queue = [fromModel]; // unshift -> queue -> pop + + graph[fromModel].distance = 0; + + while (queue.length) { + var current = queue.pop(); + var adjacents = Object.keys(conversions[current]); + + for (var len = adjacents.length, i = 0; i < len; i++) { + var adjacent = adjacents[i]; + var node = graph[adjacent]; + + if (node.distance === -1) { + node.distance = graph[current].distance + 1; + node.parent = current; + queue.unshift(adjacent); + } + } + } + + return graph; +} + +function link(from, to) { + return function (args) { + return to(from(args)); + }; +} + +function wrapConversion(toModel, graph) { + var path = [graph[toModel].parent, toModel]; + var fn = conversions[graph[toModel].parent][toModel]; + + var cur = graph[toModel].parent; + while (graph[cur].parent) { + path.unshift(graph[cur].parent); + fn = link(conversions[graph[cur].parent][cur], fn); + cur = graph[cur].parent; + } + + fn.conversion = path; + return fn; +} + +module.exports = function (fromModel) { + var graph = deriveBFS(fromModel); + var conversion = {}; + + var models = Object.keys(graph); + for (var len = models.length, i = 0; i < len; i++) { + var toModel = models[i]; + var node = graph[toModel]; + + if (node.parent === null) { + // no possible conversion, or this node is the source model. 
+ continue; + } + + conversion[toModel] = wrapConversion(toModel, graph); + } + + return conversion; +}; + diff --git a/node_modules/color-name/.eslintrc.json b/node_modules/color-name/.eslintrc.json new file mode 100644 index 0000000..c50c250 --- /dev/null +++ b/node_modules/color-name/.eslintrc.json @@ -0,0 +1,43 @@ +{ + "env": { + "browser": true, + "node": true, + "commonjs": true, + "es6": true + }, + "extends": "eslint:recommended", + "rules": { + "strict": 2, + "indent": 0, + "linebreak-style": 0, + "quotes": 0, + "semi": 0, + "no-cond-assign": 1, + "no-constant-condition": 1, + "no-duplicate-case": 1, + "no-empty": 1, + "no-ex-assign": 1, + "no-extra-boolean-cast": 1, + "no-extra-semi": 1, + "no-fallthrough": 1, + "no-func-assign": 1, + "no-global-assign": 1, + "no-implicit-globals": 2, + "no-inner-declarations": ["error", "functions"], + "no-irregular-whitespace": 2, + "no-loop-func": 1, + "no-multi-str": 1, + "no-mixed-spaces-and-tabs": 1, + "no-proto": 1, + "no-sequences": 1, + "no-throw-literal": 1, + "no-unmodified-loop-condition": 1, + "no-useless-call": 1, + "no-void": 1, + "no-with": 2, + "wrap-iife": 1, + "no-redeclare": 1, + "no-unused-vars": ["error", { "vars": "all", "args": "none" }], + "no-sparse-arrays": 1 + } +} diff --git a/node_modules/color-name/.npmignore b/node_modules/color-name/.npmignore new file mode 100644 index 0000000..3854c07 --- /dev/null +++ b/node_modules/color-name/.npmignore @@ -0,0 +1,107 @@ +//this will affect all the git repos +git config --global core.excludesfile ~/.gitignore + + +//update files since .ignore won't if already tracked +git rm --cached <file> + +# Compiled source # +################### +*.com +*.class +*.dll +*.exe +*.o +*.so + +# Packages # +############ +# it's better to unpack these files and commit the raw source +# git has its own built in compression methods +*.7z +*.dmg +*.gz +*.iso +*.jar +*.rar +*.tar +*.zip + +# Logs and databases # +###################### +*.log +*.sql +*.sqlite + +# OS generated files # +###################### +.DS_Store +.DS_Store? +._* +.Spotlight-V100 +.Trashes +# Icon? +ehthumbs.db +Thumbs.db +.cache +.project +.settings +.tmproj +*.esproj +nbproject + +# Numerous always-ignore extensions # +##################################### +*.diff +*.err +*.orig +*.rej +*.swn +*.swo +*.swp +*.vi +*~ +*.sass-cache +*.grunt +*.tmp + +# Dreamweaver added files # +########################### +_notes +dwsync.xml + +# Komodo # +########################### +*.komodoproject +.komodotools + +# Node # +##################### +node_modules + +# Bower # +##################### +bower_components + +# Folders to ignore # +##################### +.hg +.svn +.CVS +intermediate +publish +.idea +.graphics +_test +_archive +uploads +tmp + +# Vim files to ignore # +####################### +.VimballRecord +.netrwhist + +bundle.* + +_demo
\ No newline at end of file diff --git a/node_modules/color-name/LICENSE b/node_modules/color-name/LICENSE new file mode 100644 index 0000000..4d9802a --- /dev/null +++ b/node_modules/color-name/LICENSE @@ -0,0 +1,8 @@ +The MIT License (MIT) +Copyright (c) 2015 Dmitry Ivanov + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file diff --git a/node_modules/color-name/README.md b/node_modules/color-name/README.md new file mode 100644 index 0000000..3611a6b --- /dev/null +++ b/node_modules/color-name/README.md @@ -0,0 +1,11 @@ +A JSON with color names and its values. Based on http://dev.w3.org/csswg/css-color/#named-colors. + +[![NPM](https://nodei.co/npm/color-name.png?mini=true)](https://nodei.co/npm/color-name/) + + +```js +var colors = require('color-name'); +colors.red //[255,0,0] +``` + +<a href="LICENSE"><img src="https://upload.wikimedia.org/wikipedia/commons/0/0c/MIT_logo.svg" width="120"/></a> diff --git a/node_modules/color-name/index.js b/node_modules/color-name/index.js new file mode 100644 index 0000000..e42aa68 --- /dev/null +++ b/node_modules/color-name/index.js @@ -0,0 +1,152 @@ +'use strict' + +module.exports = { + "aliceblue": [240, 248, 255], + "antiquewhite": [250, 235, 215], + "aqua": [0, 255, 255], + "aquamarine": [127, 255, 212], + "azure": [240, 255, 255], + "beige": [245, 245, 220], + "bisque": [255, 228, 196], + "black": [0, 0, 0], + "blanchedalmond": [255, 235, 205], + "blue": [0, 0, 255], + "blueviolet": [138, 43, 226], + "brown": [165, 42, 42], + "burlywood": [222, 184, 135], + "cadetblue": [95, 158, 160], + "chartreuse": [127, 255, 0], + "chocolate": [210, 105, 30], + "coral": [255, 127, 80], + "cornflowerblue": [100, 149, 237], + "cornsilk": [255, 248, 220], + "crimson": [220, 20, 60], + "cyan": [0, 255, 255], + "darkblue": [0, 0, 139], + "darkcyan": [0, 139, 139], + "darkgoldenrod": [184, 134, 11], + "darkgray": [169, 169, 169], + "darkgreen": [0, 100, 0], + "darkgrey": [169, 169, 169], + "darkkhaki": [189, 183, 107], + "darkmagenta": [139, 0, 139], + "darkolivegreen": [85, 107, 47], + "darkorange": [255, 140, 0], + "darkorchid": [153, 50, 204], + "darkred": [139, 0, 0], + "darksalmon": [233, 150, 122], + "darkseagreen": [143, 188, 143], + "darkslateblue": [72, 61, 139], + "darkslategray": [47, 79, 79], + "darkslategrey": [47, 79, 79], + "darkturquoise": [0, 206, 209], + "darkviolet": [148, 0, 211], + "deeppink": [255, 20, 147], + "deepskyblue": [0, 191, 255], + "dimgray": [105, 105, 105], + "dimgrey": [105, 105, 105], + "dodgerblue": [30, 144, 255], + "firebrick": [178, 34, 34], + "floralwhite": [255, 250, 240], + "forestgreen": [34, 139, 34], + "fuchsia": [255, 0, 255], + "gainsboro": [220, 220, 220], + "ghostwhite": [248, 248, 255], + "gold": [255, 215, 0], + "goldenrod": [218, 165, 32], + "gray": [128, 128, 128], + "green": [0, 128, 0], + "greenyellow": [173, 255, 47], + "grey": [128, 128, 128], + "honeydew": [240, 255, 240], + "hotpink": [255, 105, 180], + "indianred": [205, 92, 92], + "indigo": [75, 0, 130], + "ivory": [255, 255, 240], + "khaki": [240, 230, 140], + "lavender": [230, 230, 250], + "lavenderblush": [255, 240, 245], + "lawngreen": [124, 252, 0], + "lemonchiffon": [255, 250, 205], + "lightblue": [173, 216, 230], + "lightcoral": [240, 128, 128], + "lightcyan": [224, 255, 255], + "lightgoldenrodyellow": [250, 250, 210], + "lightgray": [211, 211, 211], + "lightgreen": [144, 238, 144], + "lightgrey": [211, 211, 211], + "lightpink": [255, 182, 193], + "lightsalmon": [255, 160, 122], + "lightseagreen": [32, 178, 170], + "lightskyblue": [135, 206, 250], + "lightslategray": [119, 136, 153], + "lightslategrey": [119, 136, 153], + "lightsteelblue": [176, 196, 222], + "lightyellow": [255, 255, 224], + "lime": [0, 255, 0], + "limegreen": [50, 205, 50], + "linen": [250, 240, 230], + "magenta": [255, 0, 255], + "maroon": [128, 0, 0], + 
"mediumaquamarine": [102, 205, 170], + "mediumblue": [0, 0, 205], + "mediumorchid": [186, 85, 211], + "mediumpurple": [147, 112, 219], + "mediumseagreen": [60, 179, 113], + "mediumslateblue": [123, 104, 238], + "mediumspringgreen": [0, 250, 154], + "mediumturquoise": [72, 209, 204], + "mediumvioletred": [199, 21, 133], + "midnightblue": [25, 25, 112], + "mintcream": [245, 255, 250], + "mistyrose": [255, 228, 225], + "moccasin": [255, 228, 181], + "navajowhite": [255, 222, 173], + "navy": [0, 0, 128], + "oldlace": [253, 245, 230], + "olive": [128, 128, 0], + "olivedrab": [107, 142, 35], + "orange": [255, 165, 0], + "orangered": [255, 69, 0], + "orchid": [218, 112, 214], + "palegoldenrod": [238, 232, 170], + "palegreen": [152, 251, 152], + "paleturquoise": [175, 238, 238], + "palevioletred": [219, 112, 147], + "papayawhip": [255, 239, 213], + "peachpuff": [255, 218, 185], + "peru": [205, 133, 63], + "pink": [255, 192, 203], + "plum": [221, 160, 221], + "powderblue": [176, 224, 230], + "purple": [128, 0, 128], + "rebeccapurple": [102, 51, 153], + "red": [255, 0, 0], + "rosybrown": [188, 143, 143], + "royalblue": [65, 105, 225], + "saddlebrown": [139, 69, 19], + "salmon": [250, 128, 114], + "sandybrown": [244, 164, 96], + "seagreen": [46, 139, 87], + "seashell": [255, 245, 238], + "sienna": [160, 82, 45], + "silver": [192, 192, 192], + "skyblue": [135, 206, 235], + "slateblue": [106, 90, 205], + "slategray": [112, 128, 144], + "slategrey": [112, 128, 144], + "snow": [255, 250, 250], + "springgreen": [0, 255, 127], + "steelblue": [70, 130, 180], + "tan": [210, 180, 140], + "teal": [0, 128, 128], + "thistle": [216, 191, 216], + "tomato": [255, 99, 71], + "turquoise": [64, 224, 208], + "violet": [238, 130, 238], + "wheat": [245, 222, 179], + "white": [255, 255, 255], + "whitesmoke": [245, 245, 245], + "yellow": [255, 255, 0], + "yellowgreen": [154, 205, 50] +}; diff --git a/node_modules/color-name/package.json b/node_modules/color-name/package.json new file mode 100644 index 0000000..d061123 --- /dev/null +++ b/node_modules/color-name/package.json @@ -0,0 +1,25 @@ +{ + "name": "color-name", + "version": "1.1.3", + "description": "A list of color names and its values", + "main": "index.js", + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "git@github.com:dfcreative/color-name.git" + }, + "keywords": [ + "color-name", + "color", + "color-keyword", + "keyword" + ], + "author": "DY <dfcreative@gmail.com>", + "license": "MIT", + "bugs": { + "url": "https://github.com/dfcreative/color-name/issues" + }, + "homepage": "https://github.com/dfcreative/color-name" +} diff --git a/node_modules/color-name/test.js b/node_modules/color-name/test.js new file mode 100644 index 0000000..7a08746 --- /dev/null +++ b/node_modules/color-name/test.js @@ -0,0 +1,7 @@ +'use strict' + +var names = require('./'); +var assert = require('assert'); + +assert.deepEqual(names.red, [255,0,0]); +assert.deepEqual(names.aliceblue, [240,248,255]); diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/concat-map/LICENSE b/node_modules/concat-map/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person 
obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown new file mode 100644 index 0000000..408f70a --- /dev/null +++ b/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. + +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/node_modules/concat-map/example/map.js b/node_modules/concat-map/example/map.js new file mode 100644 index 0000000..3365621 --- /dev/null +++ b/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? 
[ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/node_modules/concat-map/index.js b/node_modules/concat-map/index.js new file mode 100644 index 0000000..b29a781 --- /dev/null +++ b/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/concat-map/package.json b/node_modules/concat-map/package.json new file mode 100644 index 0000000..d3640e6 --- /dev/null +++ b/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/node_modules/concat-map/test/map.js b/node_modules/concat-map/test/map.js new file mode 100644 index 0000000..fdbd702 --- /dev/null +++ b/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/node_modules/ejs/LICENSE b/node_modules/ejs/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/node_modules/ejs/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/ejs/README.md b/node_modules/ejs/README.md new file mode 100644 index 0000000..009809c --- /dev/null +++ b/node_modules/ejs/README.md @@ -0,0 +1,341 @@ +Embedded JavaScript templates<br/> +[![Build Status](https://img.shields.io/travis/mde/ejs/master.svg?style=flat)](https://travis-ci.org/mde/ejs) +[![Developing Dependencies](https://img.shields.io/david/dev/mde/ejs.svg?style=flat)](https://david-dm.org/mde/ejs?type=dev) +[![Known Vulnerabilities](https://snyk.io/test/npm/ejs/badge.svg?style=flat)](https://snyk.io/test/npm/ejs) +============================= + +## Installation + +```bash +$ npm install ejs +``` + +## Features + + * Control flow with `<% %>` + * Escaped output with `<%= %>` (escape function configurable) + * Unescaped raw output with `<%- %>` + * Newline-trim mode ('newline slurping') with `-%>` ending tag + * Whitespace-trim mode (slurp all whitespace) for control flow with `<%_ _%>` + * Custom delimiters (e.g. `[? ?]` instead of `<% %>`) + * Includes + * Client-side support + * Static caching of intermediate JavaScript + * Static caching of templates + * Complies with the [Express](http://expressjs.com) view system + +## Example + +```ejs +<% if (user) { %> + <h2><%= user.name %></h2> +<% } %> +``` + +Try EJS online at: https://ionicabizau.github.io/ejs-playground/. + +## Basic usage + +```javascript +let template = ejs.compile(str, options); +template(data); +// => Rendered HTML string + +ejs.render(str, data, options); +// => Rendered HTML string + +ejs.renderFile(filename, data, options, function(err, str){ + // str => Rendered HTML string +}); +``` + +It is also possible to use `ejs.render(dataAndOptions);` where you pass +everything in a single object. In that case, you'll end up with local variables +for all the passed options. However, be aware that your code could break if we +add an option with the same name as one of your data object's properties. +Therefore, we do not recommend using this shortcut. + +### Options + + - `cache` Compiled functions are cached, requires `filename` + - `filename` The name of the file being rendered. Not required if you + are using `renderFile()`. Used by `cache` to key caches, and for includes. + - `root` Set project root for includes with an absolute path (e.g, /file.ejs). + Can be array to try to resolve include from multiple directories. + - `views` An array of paths to use when resolving includes with relative paths. + - `context` Function execution context + - `compileDebug` When `false` no debug instrumentation is compiled + - `client` When `true`, compiles a function that can be rendered + in the browser without needing to load the EJS Runtime + ([ejs.min.js](https://github.com/mde/ejs/releases/latest)). + - `delimiter` Character to use for inner delimiter, by default '%' + - `openDelimiter` Character to use for opening delimiter, by default '<' + - `closeDelimiter` Character to use for closing delimiter, by default '>' + - `debug` Outputs generated function body + - `strict` When set to `true`, generated function is in strict mode + - `_with` Whether or not to use `with() {}` constructs. If `false` + then the locals will be stored in the `locals` object. Set to `false` in strict mode. + - `destructuredLocals` An array of local variables that are always destructured from + the locals object, available even in strict mode. 
+ - `localsName` Name to use for the object storing local variables when not using + `with` Defaults to `locals` + - `rmWhitespace` Remove all safe-to-remove whitespace, including leading + and trailing whitespace. It also enables a safer version of `-%>` line + slurping for all scriptlet tags (it does not strip new lines of tags in + the middle of a line). + - `escape` The escaping function used with `<%=` construct. It is + used in rendering and is `.toString()`ed in the generation of client functions. + (By default escapes XML). + - `outputFunctionName` Set to a string (e.g., 'echo' or 'print') for a function to print + output inside scriptlet tags. + - `async` When `true`, EJS will use an async function for rendering. (Depends + on async/await support in the JS runtime. + - `includer` Custom function to handle EJS includes, receives `(originalPath, parsedPath)` + parameters, where `originalPath` is the path in include as-is and `parsedPath` is the + previously resolved path. Should return an object `{ filename, template }`, + you may return only one of the properties, where `filename` is the final parsed path and `template` + is the included content. + +This project uses [JSDoc](http://usejsdoc.org/). For the full public API +documentation, clone the repository and run `jake doc`. This will run JSDoc +with the proper options and output the documentation to `out/`. If you want +the both the public & private API docs, run `jake devdoc` instead. + +### Tags + + - `<%` 'Scriptlet' tag, for control-flow, no output + - `<%_` 'Whitespace Slurping' Scriptlet tag, strips all whitespace before it + - `<%=` Outputs the value into the template (escaped) + - `<%-` Outputs the unescaped value into the template + - `<%#` Comment tag, no execution, no output + - `<%%` Outputs a literal '<%' + - `%%>` Outputs a literal '%>' + - `%>` Plain ending tag + - `-%>` Trim-mode ('newline slurp') tag, trims following newline + - `_%>` 'Whitespace Slurping' ending tag, removes all whitespace after it + +For the full syntax documentation, please see [docs/syntax.md](https://github.com/mde/ejs/blob/master/docs/syntax.md). + +### Includes + +Includes either have to be an absolute path, or, if not, are assumed as +relative to the template with the `include` call. For example if you are +including `./views/user/show.ejs` from `./views/users.ejs` you would +use `<%- include('user/show') %>`. + +You must specify the `filename` option for the template with the `include` +call unless you are using `renderFile()`. + +You'll likely want to use the raw output tag (`<%-`) with your include to avoid +double-escaping the HTML output. + +```ejs +<ul> + <% users.forEach(function(user){ %> + <%- include('user/show', {user: user}) %> + <% }); %> +</ul> +``` + +Includes are inserted at runtime, so you can use variables for the path in the +`include` call (for example `<%- include(somePath) %>`). Variables in your +top-level data object are available to all your includes, but local variables +need to be passed down. + +NOTE: Include preprocessor directives (`<% include user/show %>`) are +not supported in v3.0+. 
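+
+As a minimal sketch of the two points above (the paths and data here are
+hypothetical, not part of the EJS docs), this is what rendering
+`./views/users.ejs` with its relative `include('user/show', ...)` might look
+like, with `filename` set so the include can be resolved:
+
+```javascript
+let ejs = require('ejs');
+let fs = require('fs');
+
+let templatePath = './views/users.ejs'; // hypothetical template location
+let str = fs.readFileSync(templatePath).toString();
+
+// `filename` tells EJS where the template lives, so the relative
+// include('user/show') inside it can be resolved.
+let html = ejs.render(str, {users: [{name: 'geddy'}, {name: 'neil'}]},
+  {filename: templatePath});
+
+// renderFile() reads the file and sets `filename` for you:
+ejs.renderFile(templatePath, {users: [{name: 'geddy'}, {name: 'neil'}]}, {},
+  function (err, html2) {
+    // html2 => same rendered HTML string
+  });
+```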
+ +## Custom delimiters + +Custom delimiters can be applied on a per-template basis, or globally: + +```javascript +let ejs = require('ejs'), + users = ['geddy', 'neil', 'alex']; + +// Just one template +ejs.render('<p>[?= users.join(" | "); ?]</p>', {users: users}, {delimiter: '?', openDelimiter: '[', closeDelimiter: ']'}); +// => '<p>geddy | neil | alex</p>' + +// Or globally +ejs.delimiter = '?'; +ejs.openDelimiter = '['; +ejs.closeDelimiter = ']'; +ejs.render('<p>[?= users.join(" | "); ?]</p>', {users: users}); +// => '<p>geddy | neil | alex</p>' +``` + +### Caching + +EJS ships with a basic in-process cache for caching the intermediate JavaScript +functions used to render templates. It's easy to plug in LRU caching using +Node's `lru-cache` library: + +```javascript +let ejs = require('ejs'), + LRU = require('lru-cache'); +ejs.cache = LRU(100); // LRU cache with 100-item limit +``` + +If you want to clear the EJS cache, call `ejs.clearCache`. If you're using the +LRU cache and need a different limit, simple reset `ejs.cache` to a new instance +of the LRU. + +### Custom file loader + +The default file loader is `fs.readFileSync`, if you want to customize it, you can set ejs.fileLoader. + +```javascript +let ejs = require('ejs'); +let myFileLoad = function (filePath) { + return 'myFileLoad: ' + fs.readFileSync(filePath); +}; + +ejs.fileLoader = myFileLoad; +``` + +With this feature, you can preprocess the template before reading it. + +### Layouts + +EJS does not specifically support blocks, but layouts can be implemented by +including headers and footers, like so: + + +```ejs +<%- include('header') -%> +<h1> + Title +</h1> +<p> + My page +</p> +<%- include('footer') -%> +``` + +## Client-side support + +Go to the [Latest Release](https://github.com/mde/ejs/releases/latest), download +`./ejs.js` or `./ejs.min.js`. Alternately, you can compile it yourself by cloning +the repository and running `jake build` (or `$(npm bin)/jake build` if jake is +not installed globally). + +Include one of these files on your page, and `ejs` should be available globally. + +### Example + +```html +<div id="output"></div> +<script src="ejs.min.js"></script> +<script> + let people = ['geddy', 'neil', 'alex'], + html = ejs.render('<%= people.join(", "); %>', {people: people}); + // With jQuery: + $('#output').html(html); + // Vanilla JS: + document.getElementById('output').innerHTML = html; +</script> +``` + +### Caveats + +Most of EJS will work as expected; however, there are a few things to note: + +1. Obviously, since you do not have access to the filesystem, `ejs.renderFile()` won't work. +2. For the same reason, `include`s do not work unless you use an `include callback`. Here is an example: + ```javascript + let str = "Hello <%= include('file', {person: 'John'}); %>", + fn = ejs.compile(str, {client: true}); + + fn(data, null, function(path, d){ // include callback + // path -> 'file' + // d -> {person: 'John'} + // Put your code here + // Return the contents of file as a string + }); // returns rendered string + ``` + +See the [examples folder](https://github.com/mde/ejs/tree/master/examples) for more details. + +## CLI + +EJS ships with a full-featured CLI. Options are similar to those used in JavaScript code: + + - `-o / --output-file FILE` Write the rendered output to FILE rather than stdout. + - `-f / --data-file FILE` Must be JSON-formatted. Use parsed input from FILE as data for rendering. + - `-i / --data-input STRING` Must be JSON-formatted and URI-encoded. 
Use parsed input from STRING as data for rendering. + - `-m / --delimiter CHARACTER` Use CHARACTER with angle brackets for open/close (defaults to %). + - `-p / --open-delimiter CHARACTER` Use CHARACTER instead of left angle bracket to open. + - `-c / --close-delimiter CHARACTER` Use CHARACTER instead of right angle bracket to close. + - `-s / --strict` When set to `true`, generated function is in strict mode + - `-n / --no-with` Use 'locals' object for vars rather than using `with` (implies --strict). + - `-l / --locals-name` Name to use for the object storing local variables when not using `with`. + - `-w / --rm-whitespace` Remove all safe-to-remove whitespace, including leading and trailing whitespace. + - `-d / --debug` Outputs generated function body + - `-h / --help` Display this help message. + - `-V/v / --version` Display the EJS version. + +Here are some examples of usage: + +```shell +$ ejs -p [ -c ] ./template_file.ejs -o ./output.html +$ ejs ./test/fixtures/user.ejs name=Lerxst +$ ejs -n -l _ ./some_template.ejs -f ./data_file.json +``` + +### Data input + +There is a variety of ways to pass the CLI data for rendering. + +Stdin: + +```shell +$ ./test/fixtures/user_data.json | ejs ./test/fixtures/user.ejs +$ ejs ./test/fixtures/user.ejs < test/fixtures/user_data.json +``` + +A data file: + +```shell +$ ejs ./test/fixtures/user.ejs -f ./user_data.json +``` + +A command-line option (must be URI-encoded): + +```shell +./bin/cli.js -i %7B%22name%22%3A%20%22foo%22%7D ./test/fixtures/user.ejs +``` + +Or, passing values directly at the end of the invocation: + +```shell +./bin/cli.js -m $ ./test/fixtures/user.ejs name=foo +``` + +### Output + +The CLI by default send output to stdout, but you can use the `-o` or `--output-file` +flag to specify a target file to send the output to. + +## IDE Integration with Syntax Highlighting + +VSCode:Javascript EJS by *DigitalBrainstem* + +## Related projects + +There are a number of implementations of EJS: + + * TJ's implementation, the v1 of this library: https://github.com/tj/ejs + * EJS Embedded JavaScript Framework on Google Code: https://code.google.com/p/embeddedjavascript/ + * Sam Stephenson's Ruby implementation: https://rubygems.org/gems/ejs + * Erubis, an ERB implementation which also runs JavaScript: http://www.kuwata-lab.com/erubis/users-guide.04.html#lang-javascript + * DigitalBrainstem EJS Language support: https://github.com/Digitalbrainstem/ejs-grammar + +## License + +Licensed under the Apache License, Version 2.0 +(<http://www.apache.org/licenses/LICENSE-2.0>) + +- - - +EJS Embedded JavaScript templates copyright 2112 +mde@fleegix.org. diff --git a/node_modules/ejs/bin/cli.js b/node_modules/ejs/bin/cli.js new file mode 100644 index 0000000..0feab0b --- /dev/null +++ b/node_modules/ejs/bin/cli.js @@ -0,0 +1,212 @@ +#!/usr/bin/env node +/* + * EJS Embedded JavaScript templates + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + + +let program = require('jake').program; +delete global.jake; // NO NOT WANT +program.setTaskNames = function (n) { this.taskNames = n; }; + +let ejs = require('../lib/ejs'); +let { hyphenToCamel } = require('../lib/utils'); +let fs = require('fs'); +let args = process.argv.slice(2); +let usage = fs.readFileSync(`${__dirname}/../usage.txt`).toString(); + +const CLI_OPTS = [ + { full: 'output-file', + abbr: 'o', + expectValue: true, + }, + { full: 'data-file', + abbr: 'f', + expectValue: true, + }, + { full: 'data-input', + abbr: 'i', + expectValue: true, + }, + { full: 'delimiter', + abbr: 'm', + expectValue: true, + passThrough: true, + }, + { full: 'open-delimiter', + abbr: 'p', + expectValue: true, + passThrough: true, + }, + { full: 'close-delimiter', + abbr: 'c', + expectValue: true, + passThrough: true, + }, + { full: 'strict', + abbr: 's', + expectValue: false, + allowValue: false, + passThrough: true, + }, + { full: 'no-with', + abbr: 'n', + expectValue: false, + allowValue: false, + }, + { full: 'locals-name', + abbr: 'l', + expectValue: true, + passThrough: true, + }, + { full: 'rm-whitespace', + abbr: 'w', + expectValue: false, + allowValue: false, + passThrough: true, + }, + { full: 'debug', + abbr: 'd', + expectValue: false, + allowValue: false, + passThrough: true, + }, + { full: 'help', + abbr: 'h', + passThrough: true, + }, + { full: 'version', + abbr: 'V', + passThrough: true, + }, + // Alias lowercase v + { full: 'version', + abbr: 'v', + passThrough: true, + }, +]; + +let preempts = { + version: function () { + program.die(ejs.VERSION); + }, + help: function () { + program.die(usage); + } +}; + +let stdin = ''; +process.stdin.setEncoding('utf8'); +process.stdin.on('readable', () => { + let chunk; + while ((chunk = process.stdin.read()) !== null) { + stdin += chunk; + } +}); + +function run() { + + program.availableOpts = CLI_OPTS; + program.parseArgs(args); + + let templatePath = program.taskNames[0]; + let pVals = program.envVars; + let pOpts = {}; + + for (let p in program.opts) { + let name = hyphenToCamel(p); + pOpts[name] = program.opts[p]; + } + + let opts = {}; + let vals = {}; + + // Same-named 'passthrough' opts + CLI_OPTS.forEach((opt) => { + let optName = hyphenToCamel(opt.full); + if (opt.passThrough && typeof pOpts[optName] != 'undefined') { + opts[optName] = pOpts[optName]; + } + }); + + // Bail out for help/version + for (let p in opts) { + if (preempts[p]) { + return preempts[p](); + } + } + + // Default to having views relative from the current working directory + opts.views = ['.']; + + // Ensure there's a template to render + if (!templatePath) { + throw new Error('Please provide a template path. (Run ejs -h for help)'); + } + + if (opts.strict) { + pOpts.noWith = true; + } + if (pOpts.noWith) { + opts._with = false; + } + + // Grab and parse any input data, in order of precedence: + // 1. Stdin + // 2. CLI arg via -i + // 3. Data file via -f + // Any individual vals passed at the end (e.g., foo=bar) will override + // any vals previously set + let input; + let err = new Error('Please do not pass data multiple ways. 
Pick one of stdin, -f, or -i.'); + if (stdin) { + input = stdin; + } + else if (pOpts.dataInput) { + if (input) { + throw err; + } + input = decodeURIComponent(pOpts.dataInput); + } + else if (pOpts.dataFile) { + if (input) { + throw err; + } + input = fs.readFileSync(pOpts.dataFile).toString(); + } + + if (input) { + vals = JSON.parse(input); + } + + // Override / set any individual values passed from the command line + for (let p in pVals) { + vals[p] = pVals[p]; + } + + let template = fs.readFileSync(templatePath).toString(); + let output = ejs.render(template, vals, opts); + if (pOpts.outputFile) { + fs.writeFileSync(pOpts.outputFile, output); + } + else { + process.stdout.write(output); + } + process.exit(); +} + +// Defer execution so that stdin can be read if necessary +setImmediate(run); diff --git a/node_modules/ejs/ejs.js b/node_modules/ejs/ejs.js new file mode 100644 index 0000000..5310e76 --- /dev/null +++ b/node_modules/ejs/ejs.js @@ -0,0 +1,1662 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.ejs = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){ +/* + * EJS Embedded JavaScript templates + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +'use strict'; + +/** + * @file Embedded JavaScript templating engine. {@link http://ejs.co} + * @author Matthew Eernisse <mde@fleegix.org> + * @author Tiancheng "Timothy" Gu <timothygu99@gmail.com> + * @project EJS + * @license {@link http://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0} + */ + +/** + * EJS internal functions. + * + * Technically this "module" lies in the same file as {@link module:ejs}, for + * the sake of organization all the private functions re grouped into this + * module. + * + * @module ejs-internal + * @private + */ + +/** + * Embedded JavaScript templating engine. 
+ * + * @module ejs + * @public + */ + +var fs = require('fs'); +var path = require('path'); +var utils = require('./utils'); + +var scopeOptionWarned = false; +/** @type {string} */ +var _VERSION_STRING = require('../package.json').version; +var _DEFAULT_OPEN_DELIMITER = '<'; +var _DEFAULT_CLOSE_DELIMITER = '>'; +var _DEFAULT_DELIMITER = '%'; +var _DEFAULT_LOCALS_NAME = 'locals'; +var _NAME = 'ejs'; +var _REGEX_STRING = '(<%%|%%>|<%=|<%-|<%_|<%#|<%|%>|-%>|_%>)'; +var _OPTS_PASSABLE_WITH_DATA = ['delimiter', 'scope', 'context', 'debug', 'compileDebug', + 'client', '_with', 'rmWhitespace', 'strict', 'filename', 'async']; +// We don't allow 'cache' option to be passed in the data obj for +// the normal `render` call, but this is where Express 2 & 3 put it +// so we make an exception for `renderFile` +var _OPTS_PASSABLE_WITH_DATA_EXPRESS = _OPTS_PASSABLE_WITH_DATA.concat('cache'); +var _BOM = /^\uFEFF/; + +/** + * EJS template function cache. This can be a LRU object from lru-cache NPM + * module. By default, it is {@link module:utils.cache}, a simple in-process + * cache that grows continuously. + * + * @type {Cache} + */ + +exports.cache = utils.cache; + +/** + * Custom file loader. Useful for template preprocessing or restricting access + * to a certain part of the filesystem. + * + * @type {fileLoader} + */ + +exports.fileLoader = fs.readFileSync; + +/** + * Name of the object containing the locals. + * + * This variable is overridden by {@link Options}`.localsName` if it is not + * `undefined`. + * + * @type {String} + * @public + */ + +exports.localsName = _DEFAULT_LOCALS_NAME; + +/** + * Promise implementation -- defaults to the native implementation if available + * This is mostly just for testability + * + * @type {PromiseConstructorLike} + * @public + */ + +exports.promiseImpl = (new Function('return this;'))().Promise; + +/** + * Get the path to the included file from the parent file path and the + * specified path. + * + * @param {String} name specified path + * @param {String} filename parent file path + * @param {Boolean} [isDir=false] whether the parent file path is a directory + * @return {String} + */ +exports.resolveInclude = function(name, filename, isDir) { + var dirname = path.dirname; + var extname = path.extname; + var resolve = path.resolve; + var includePath = resolve(isDir ? 
filename : dirname(filename), name); + var ext = extname(name); + if (!ext) { + includePath += '.ejs'; + } + return includePath; +}; + +/** + * Try to resolve file path on multiple directories + * + * @param {String} name specified path + * @param {Array<String>} paths list of possible parent directory paths + * @return {String} + */ +function resolvePaths(name, paths) { + var filePath; + if (paths.some(function (v) { + filePath = exports.resolveInclude(name, v, true); + return fs.existsSync(filePath); + })) { + return filePath; + } +} + +/** + * Get the path to the included file by Options + * + * @param {String} path specified path + * @param {Options} options compilation options + * @return {String} + */ +function getIncludePath(path, options) { + var includePath; + var filePath; + var views = options.views; + var match = /^[A-Za-z]+:\\|^\//.exec(path); + + // Abs path + if (match && match.length) { + path = path.replace(/^\/*/, ''); + if (Array.isArray(options.root)) { + includePath = resolvePaths(path, options.root); + } else { + includePath = exports.resolveInclude(path, options.root || '/', true); + } + } + // Relative paths + else { + // Look relative to a passed filename first + if (options.filename) { + filePath = exports.resolveInclude(path, options.filename); + if (fs.existsSync(filePath)) { + includePath = filePath; + } + } + // Then look in any views directories + if (!includePath && Array.isArray(views)) { + includePath = resolvePaths(path, views); + } + if (!includePath && typeof options.includer !== 'function') { + throw new Error('Could not find the include file "' + + options.escapeFunction(path) + '"'); + } + } + return includePath; +} + +/** + * Get the template from a string or a file, either compiled on-the-fly or + * read from cache (if enabled), and cache the template if needed. + * + * If `template` is not set, the file specified in `options.filename` will be + * read. + * + * If `options.cache` is true, this function reads the file from + * `options.filename` so it must be set prior to calling this function. + * + * @memberof module:ejs-internal + * @param {Options} options compilation options + * @param {String} [template] template source + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `options.client`, either type might be returned. + * @static + */ + +function handleCache(options, template) { + var func; + var filename = options.filename; + var hasTemplate = arguments.length > 1; + + if (options.cache) { + if (!filename) { + throw new Error('cache option requires a filename'); + } + func = exports.cache.get(filename); + if (func) { + return func; + } + if (!hasTemplate) { + template = fileLoader(filename).toString().replace(_BOM, ''); + } + } + else if (!hasTemplate) { + // istanbul ignore if: should not happen at all + if (!filename) { + throw new Error('Internal EJS error: no file name or template ' + + 'provided'); + } + template = fileLoader(filename).toString().replace(_BOM, ''); + } + func = exports.compile(template, options); + if (options.cache) { + exports.cache.set(filename, func); + } + return func; +} + +/** + * Try calling handleCache with the given options and data and call the + * callback with the result. If an error occurs, call the callback with + * the error. Used by renderFile(). 
+ * + * @memberof module:ejs-internal + * @param {Options} options compilation options + * @param {Object} data template data + * @param {RenderFileCallback} cb callback + * @static + */ + +function tryHandleCache(options, data, cb) { + var result; + if (!cb) { + if (typeof exports.promiseImpl == 'function') { + return new exports.promiseImpl(function (resolve, reject) { + try { + result = handleCache(options)(data); + resolve(result); + } + catch (err) { + reject(err); + } + }); + } + else { + throw new Error('Please provide a callback function'); + } + } + else { + try { + result = handleCache(options)(data); + } + catch (err) { + return cb(err); + } + + cb(null, result); + } +} + +/** + * fileLoader is independent + * + * @param {String} filePath ejs file path. + * @return {String} The contents of the specified file. + * @static + */ + +function fileLoader(filePath){ + return exports.fileLoader(filePath); +} + +/** + * Get the template function. + * + * If `options.cache` is `true`, then the template is cached. + * + * @memberof module:ejs-internal + * @param {String} path path for the specified file + * @param {Options} options compilation options + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `options.client`, either type might be returned + * @static + */ + +function includeFile(path, options) { + var opts = utils.shallowCopy({}, options); + opts.filename = getIncludePath(path, opts); + if (typeof options.includer === 'function') { + var includerResult = options.includer(path, opts.filename); + if (includerResult) { + if (includerResult.filename) { + opts.filename = includerResult.filename; + } + if (includerResult.template) { + return handleCache(opts, includerResult.template); + } + } + } + return handleCache(opts); +} + +/** + * Re-throw the given `err` in context to the `str` of ejs, `filename`, and + * `lineno`. + * + * @implements {RethrowCallback} + * @memberof module:ejs-internal + * @param {Error} err Error object + * @param {String} str EJS source + * @param {String} flnm file name of the EJS file + * @param {Number} lineno line number of the error + * @param {EscapeCallback} esc + * @static + */ + +function rethrow(err, str, flnm, lineno, esc) { + var lines = str.split('\n'); + var start = Math.max(lineno - 3, 0); + var end = Math.min(lines.length, lineno + 3); + var filename = esc(flnm); + // Error context + var context = lines.slice(start, end).map(function (line, i){ + var curr = i + start + 1; + return (curr == lineno ? ' >> ' : ' ') + + curr + + '| ' + + line; + }).join('\n'); + + // Alter exception message + err.path = filename; + err.message = (filename || 'ejs') + ':' + + lineno + '\n' + + context + '\n\n' + + err.message; + + throw err; +} + +function stripSemi(str){ + return str.replace(/;(\s*$)/, '$1'); +} + +/** + * Compile the given `str` of ejs into a template function. + * + * @param {String} template EJS template + * + * @param {Options} [opts] compilation options + * + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `opts.client`, either type might be returned. + * Note that the return type of the function also depends on the value of `opts.async`. 
+ * @public + */ + +exports.compile = function compile(template, opts) { + var templ; + + // v1 compat + // 'scope' is 'context' + // FIXME: Remove this in a future version + if (opts && opts.scope) { + if (!scopeOptionWarned){ + console.warn('`scope` option is deprecated and will be removed in EJS 3'); + scopeOptionWarned = true; + } + if (!opts.context) { + opts.context = opts.scope; + } + delete opts.scope; + } + templ = new Template(template, opts); + return templ.compile(); +}; + +/** + * Render the given `template` of ejs. + * + * If you would like to include options but not data, you need to explicitly + * call this function with `data` being an empty object or `null`. + * + * @param {String} template EJS template + * @param {Object} [data={}] template data + * @param {Options} [opts={}] compilation and rendering options + * @return {(String|Promise<String>)} + * Return value type depends on `opts.async`. + * @public + */ + +exports.render = function (template, d, o) { + var data = d || {}; + var opts = o || {}; + + // No options object -- if there are optiony names + // in the data, copy them to options + if (arguments.length == 2) { + utils.shallowCopyFromList(opts, data, _OPTS_PASSABLE_WITH_DATA); + } + + return handleCache(opts, template)(data); +}; + +/** + * Render an EJS file at the given `path` and callback `cb(err, str)`. + * + * If you would like to include options but not data, you need to explicitly + * call this function with `data` being an empty object or `null`. + * + * @param {String} path path to the EJS file + * @param {Object} [data={}] template data + * @param {Options} [opts={}] compilation and rendering options + * @param {RenderFileCallback} cb callback + * @public + */ + +exports.renderFile = function () { + var args = Array.prototype.slice.call(arguments); + var filename = args.shift(); + var cb; + var opts = {filename: filename}; + var data; + var viewOpts; + + // Do we have a callback? + if (typeof arguments[arguments.length - 1] == 'function') { + cb = args.pop(); + } + // Do we have data/opts? + if (args.length) { + // Should always have data obj + data = args.shift(); + // Normal passed opts (data obj + opts obj) + if (args.length) { + // Use shallowCopy so we don't pollute passed in opts obj with new vals + utils.shallowCopy(opts, args.pop()); + } + // Special casing for Express (settings + opts-in-data) + else { + // Express 3 and 4 + if (data.settings) { + // Pull a few things from known locations + if (data.settings.views) { + opts.views = data.settings.views; + } + if (data.settings['view cache']) { + opts.cache = true; + } + // Undocumented after Express 2, but still usable, esp. for + // items that are unsafe to be passed along with data, like `root` + viewOpts = data.settings['view options']; + if (viewOpts) { + utils.shallowCopy(opts, viewOpts); + } + } + // Express 2 and lower, values set in app.locals, or people who just + // want to pass options in their data. NOTE: These values will override + // anything previously set in settings or settings['view options'] + utils.shallowCopyFromList(opts, data, _OPTS_PASSABLE_WITH_DATA_EXPRESS); + } + opts.filename = filename; + } + else { + data = {}; + } + + return tryHandleCache(opts, data, cb); +}; + +/** + * Clear intermediate JavaScript cache. Calls {@link Cache#reset}. 
+ * @public + */ + +/** + * EJS template class + * @public + */ +exports.Template = Template; + +exports.clearCache = function () { + exports.cache.reset(); +}; + +function Template(text, opts) { + opts = opts || {}; + var options = {}; + this.templateText = text; + /** @type {string | null} */ + this.mode = null; + this.truncate = false; + this.currentLine = 1; + this.source = ''; + options.client = opts.client || false; + options.escapeFunction = opts.escape || opts.escapeFunction || utils.escapeXML; + options.compileDebug = opts.compileDebug !== false; + options.debug = !!opts.debug; + options.filename = opts.filename; + options.openDelimiter = opts.openDelimiter || exports.openDelimiter || _DEFAULT_OPEN_DELIMITER; + options.closeDelimiter = opts.closeDelimiter || exports.closeDelimiter || _DEFAULT_CLOSE_DELIMITER; + options.delimiter = opts.delimiter || exports.delimiter || _DEFAULT_DELIMITER; + options.strict = opts.strict || false; + options.context = opts.context; + options.cache = opts.cache || false; + options.rmWhitespace = opts.rmWhitespace; + options.root = opts.root; + options.includer = opts.includer; + options.outputFunctionName = opts.outputFunctionName; + options.localsName = opts.localsName || exports.localsName || _DEFAULT_LOCALS_NAME; + options.views = opts.views; + options.async = opts.async; + options.destructuredLocals = opts.destructuredLocals; + options.legacyInclude = typeof opts.legacyInclude != 'undefined' ? !!opts.legacyInclude : true; + + if (options.strict) { + options._with = false; + } + else { + options._with = typeof opts._with != 'undefined' ? opts._with : true; + } + + this.opts = options; + + this.regex = this.createRegex(); +} + +Template.modes = { + EVAL: 'eval', + ESCAPED: 'escaped', + RAW: 'raw', + COMMENT: 'comment', + LITERAL: 'literal' +}; + +Template.prototype = { + createRegex: function () { + var str = _REGEX_STRING; + var delim = utils.escapeRegExpChars(this.opts.delimiter); + var open = utils.escapeRegExpChars(this.opts.openDelimiter); + var close = utils.escapeRegExpChars(this.opts.closeDelimiter); + str = str.replace(/%/g, delim) + .replace(/</g, open) + .replace(/>/g, close); + return new RegExp(str); + }, + + compile: function () { + /** @type {string} */ + var src; + /** @type {ClientFunction} */ + var fn; + var opts = this.opts; + var prepended = ''; + var appended = ''; + /** @type {EscapeCallback} */ + var escapeFn = opts.escapeFunction; + /** @type {FunctionConstructor} */ + var ctor; + /** @type {string} */ + var sanitizedFilename = opts.filename ? JSON.stringify(opts.filename) : 'undefined'; + + if (!this.source) { + this.generateSource(); + prepended += + ' var __output = "";\n' + + ' function __append(s) { if (s !== undefined && s !== null) __output += s }\n'; + if (opts.outputFunctionName) { + prepended += ' var ' + opts.outputFunctionName + ' = __append;' + '\n'; + } + if (opts.destructuredLocals && opts.destructuredLocals.length) { + var destructuring = ' var __locals = (' + opts.localsName + ' || {}),\n'; + for (var i = 0; i < opts.destructuredLocals.length; i++) { + var name = opts.destructuredLocals[i]; + if (i > 0) { + destructuring += ',\n '; + } + destructuring += name + ' = __locals.' 
+ name; + } + prepended += destructuring + ';\n'; + } + if (opts._with !== false) { + prepended += ' with (' + opts.localsName + ' || {}) {' + '\n'; + appended += ' }' + '\n'; + } + appended += ' return __output;' + '\n'; + this.source = prepended + this.source + appended; + } + + if (opts.compileDebug) { + src = 'var __line = 1' + '\n' + + ' , __lines = ' + JSON.stringify(this.templateText) + '\n' + + ' , __filename = ' + sanitizedFilename + ';' + '\n' + + 'try {' + '\n' + + this.source + + '} catch (e) {' + '\n' + + ' rethrow(e, __lines, __filename, __line, escapeFn);' + '\n' + + '}' + '\n'; + } + else { + src = this.source; + } + + if (opts.client) { + src = 'escapeFn = escapeFn || ' + escapeFn.toString() + ';' + '\n' + src; + if (opts.compileDebug) { + src = 'rethrow = rethrow || ' + rethrow.toString() + ';' + '\n' + src; + } + } + + if (opts.strict) { + src = '"use strict";\n' + src; + } + if (opts.debug) { + console.log(src); + } + if (opts.compileDebug && opts.filename) { + src = src + '\n' + + '//# sourceURL=' + sanitizedFilename + '\n'; + } + + try { + if (opts.async) { + // Have to use generated function for this, since in envs without support, + // it breaks in parsing + try { + ctor = (new Function('return (async function(){}).constructor;'))(); + } + catch(e) { + if (e instanceof SyntaxError) { + throw new Error('This environment does not support async/await'); + } + else { + throw e; + } + } + } + else { + ctor = Function; + } + fn = new ctor(opts.localsName + ', escapeFn, include, rethrow', src); + } + catch(e) { + // istanbul ignore else + if (e instanceof SyntaxError) { + if (opts.filename) { + e.message += ' in ' + opts.filename; + } + e.message += ' while compiling ejs\n\n'; + e.message += 'If the above error is not helpful, you may want to try EJS-Lint:\n'; + e.message += 'https://github.com/RyanZim/EJS-Lint'; + if (!opts.async) { + e.message += '\n'; + e.message += 'Or, if you meant to create an async function, pass `async: true` as an option.'; + } + } + throw e; + } + + // Return a callable function which will execute the function + // created by the source-code, with the passed data as locals + // Adds a local `include` function which allows full recursive include + var returnedFn = opts.client ? fn : function anonymous(data) { + var include = function (path, includeData) { + var d = utils.shallowCopy({}, data); + if (includeData) { + d = utils.shallowCopy(d, includeData); + } + return includeFile(path, opts)(d); + }; + return fn.apply(opts.context, [data || {}, escapeFn, include, rethrow]); + }; + if (opts.filename && typeof Object.defineProperty === 'function') { + var filename = opts.filename; + var basename = path.basename(filename, path.extname(filename)); + try { + Object.defineProperty(returnedFn, 'name', { + value: basename, + writable: false, + enumerable: false, + configurable: true + }); + } catch (e) {/* ignore */} + } + return returnedFn; + }, + + generateSource: function () { + var opts = this.opts; + + if (opts.rmWhitespace) { + // Have to use two separate replace here as `^` and `$` operators don't + // work well with `\r` and empty lines don't work well with the `m` flag. 
+ this.templateText = + this.templateText.replace(/[\r\n]+/g, '\n').replace(/^\s+|\s+$/gm, ''); + } + + // Slurp spaces and tabs before <%_ and after _%> + this.templateText = + this.templateText.replace(/[ \t]*<%_/gm, '<%_').replace(/_%>[ \t]*/gm, '_%>'); + + var self = this; + var matches = this.parseTemplateText(); + var d = this.opts.delimiter; + var o = this.opts.openDelimiter; + var c = this.opts.closeDelimiter; + + if (matches && matches.length) { + matches.forEach(function (line, index) { + var closing; + // If this is an opening tag, check for closing tags + // FIXME: May end up with some false positives here + // Better to store modes as k/v with openDelimiter + delimiter as key + // Then this can simply check against the map + if ( line.indexOf(o + d) === 0 // If it is a tag + && line.indexOf(o + d + d) !== 0) { // and is not escaped + closing = matches[index + 2]; + if (!(closing == d + c || closing == '-' + d + c || closing == '_' + d + c)) { + throw new Error('Could not find matching close tag for "' + line + '".'); + } + } + self.scanLine(line); + }); + } + + }, + + parseTemplateText: function () { + var str = this.templateText; + var pat = this.regex; + var result = pat.exec(str); + var arr = []; + var firstPos; + + while (result) { + firstPos = result.index; + + if (firstPos !== 0) { + arr.push(str.substring(0, firstPos)); + str = str.slice(firstPos); + } + + arr.push(result[0]); + str = str.slice(result[0].length); + result = pat.exec(str); + } + + if (str) { + arr.push(str); + } + + return arr; + }, + + _addOutput: function (line) { + if (this.truncate) { + // Only replace single leading linebreak in the line after + // -%> tag -- this is the single, trailing linebreak + // after the tag that the truncation mode replaces + // Handle Win / Unix / old Mac linebreaks -- do the \r\n + // combo first in the regex-or + line = line.replace(/^(?:\r\n|\r|\n)/, ''); + this.truncate = false; + } + if (!line) { + return line; + } + + // Preserve literal slashes + line = line.replace(/\\/g, '\\\\'); + + // Convert linebreaks + line = line.replace(/\n/g, '\\n'); + line = line.replace(/\r/g, '\\r'); + + // Escape double-quotes + // - this will be the delimiter during execution + line = line.replace(/"/g, '\\"'); + this.source += ' ; __append("' + line + '")' + '\n'; + }, + + scanLine: function (line) { + var self = this; + var d = this.opts.delimiter; + var o = this.opts.openDelimiter; + var c = this.opts.closeDelimiter; + var newLineCount = 0; + + newLineCount = (line.split('\n').length - 1); + + switch (line) { + case o + d: + case o + d + '_': + this.mode = Template.modes.EVAL; + break; + case o + d + '=': + this.mode = Template.modes.ESCAPED; + break; + case o + d + '-': + this.mode = Template.modes.RAW; + break; + case o + d + '#': + this.mode = Template.modes.COMMENT; + break; + case o + d + d: + this.mode = Template.modes.LITERAL; + this.source += ' ; __append("' + line.replace(o + d + d, o + d) + '")' + '\n'; + break; + case d + d + c: + this.mode = Template.modes.LITERAL; + this.source += ' ; __append("' + line.replace(d + d + c, d + c) + '")' + '\n'; + break; + case d + c: + case '-' + d + c: + case '_' + d + c: + if (this.mode == Template.modes.LITERAL) { + this._addOutput(line); + } + + this.mode = null; + this.truncate = line.indexOf('-') === 0 || line.indexOf('_') === 0; + break; + default: + // In script mode, depends on type of tag + if (this.mode) { + // If '//' is found without a line break, add a line break. 
+ switch (this.mode) { + case Template.modes.EVAL: + case Template.modes.ESCAPED: + case Template.modes.RAW: + if (line.lastIndexOf('//') > line.lastIndexOf('\n')) { + line += '\n'; + } + } + switch (this.mode) { + // Just executing code + case Template.modes.EVAL: + this.source += ' ; ' + line + '\n'; + break; + // Exec, esc, and output + case Template.modes.ESCAPED: + this.source += ' ; __append(escapeFn(' + stripSemi(line) + '))' + '\n'; + break; + // Exec and output + case Template.modes.RAW: + this.source += ' ; __append(' + stripSemi(line) + ')' + '\n'; + break; + case Template.modes.COMMENT: + // Do nothing + break; + // Literal <%% mode, append as raw output + case Template.modes.LITERAL: + this._addOutput(line); + break; + } + } + // In string mode, just add the output + else { + this._addOutput(line); + } + } + + if (self.opts.compileDebug && newLineCount) { + this.currentLine += newLineCount; + this.source += ' ; __line = ' + this.currentLine + '\n'; + } + } +}; + +/** + * Escape characters reserved in XML. + * + * This is simply an export of {@link module:utils.escapeXML}. + * + * If `markup` is `undefined` or `null`, the empty string is returned. + * + * @param {String} markup Input string + * @return {String} Escaped string + * @public + * @func + * */ +exports.escapeXML = utils.escapeXML; + +/** + * Express.js support. + * + * This is an alias for {@link module:ejs.renderFile}, in order to support + * Express.js out-of-the-box. + * + * @func + */ + +exports.__express = exports.renderFile; + +/** + * Version of EJS. + * + * @readonly + * @type {String} + * @public + */ + +exports.VERSION = _VERSION_STRING; + +/** + * Name for detection of EJS. + * + * @readonly + * @type {String} + * @public + */ + +exports.name = _NAME; + +/* istanbul ignore if */ +if (typeof window != 'undefined') { + window.ejs = exports; +} + +},{"../package.json":6,"./utils":2,"fs":3,"path":4}],2:[function(require,module,exports){ +/* + * EJS Embedded JavaScript templates + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +/** + * Private utility functions + * @module utils + * @private + */ + +'use strict'; + +var regExpChars = /[|\\{}()[\]^$+*?.]/g; + +/** + * Escape characters reserved in regular expressions. + * + * If `string` is `undefined` or `null`, the empty string is returned. + * + * @param {String} string Input string + * @return {String} Escaped string + * @static + * @private + */ +exports.escapeRegExpChars = function (string) { + // istanbul ignore if + if (!string) { + return ''; + } + return String(string).replace(regExpChars, '\\$&'); +}; + +var _ENCODE_HTML_RULES = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''' +}; +var _MATCH_HTML = /[&<>'"]/g; + +function encode_char(c) { + return _ENCODE_HTML_RULES[c] || c; +} + +/** + * Stringified version of constants used by {@link module:utils.escapeXML}. + * + * It is used in the process of generating {@link ClientFunction}s. 
+ * + * @readonly + * @type {String} + */ + +var escapeFuncStr = + 'var _ENCODE_HTML_RULES = {\n' ++ ' "&": "&"\n' ++ ' , "<": "<"\n' ++ ' , ">": ">"\n' ++ ' , \'"\': """\n' ++ ' , "\'": "'"\n' ++ ' }\n' ++ ' , _MATCH_HTML = /[&<>\'"]/g;\n' ++ 'function encode_char(c) {\n' ++ ' return _ENCODE_HTML_RULES[c] || c;\n' ++ '};\n'; + +/** + * Escape characters reserved in XML. + * + * If `markup` is `undefined` or `null`, the empty string is returned. + * + * @implements {EscapeCallback} + * @param {String} markup Input string + * @return {String} Escaped string + * @static + * @private + */ + +exports.escapeXML = function (markup) { + return markup == undefined + ? '' + : String(markup) + .replace(_MATCH_HTML, encode_char); +}; +exports.escapeXML.toString = function () { + return Function.prototype.toString.call(this) + ';\n' + escapeFuncStr; +}; + +/** + * Naive copy of properties from one object to another. + * Does not recurse into non-scalar properties + * Does not check to see if the property has a value before copying + * + * @param {Object} to Destination object + * @param {Object} from Source object + * @return {Object} Destination object + * @static + * @private + */ +exports.shallowCopy = function (to, from) { + from = from || {}; + for (var p in from) { + to[p] = from[p]; + } + return to; +}; + +/** + * Naive copy of a list of key names, from one object to another. + * Only copies property if it is actually defined + * Does not recurse into non-scalar properties + * + * @param {Object} to Destination object + * @param {Object} from Source object + * @param {Array} list List of properties to copy + * @return {Object} Destination object + * @static + * @private + */ +exports.shallowCopyFromList = function (to, from, list) { + for (var i = 0; i < list.length; i++) { + var p = list[i]; + if (typeof from[p] != 'undefined') { + to[p] = from[p]; + } + } + return to; +}; + +/** + * Simple in-process cache implementation. Does not implement limits of any + * sort. + * + * @implements {Cache} + * @static + * @private + */ +exports.cache = { + _data: {}, + set: function (key, val) { + this._data[key] = val; + }, + get: function (key) { + return this._data[key]; + }, + remove: function (key) { + delete this._data[key]; + }, + reset: function () { + this._data = {}; + } +}; + +/** + * Transforms hyphen case variable into camel case. + * + * @param {String} string Hyphen case string + * @return {String} Camel case string + * @static + * @private + */ +exports.hyphenToCamel = function (str) { + return str.replace(/-[a-z]/g, function (match) { return match[1].toUpperCase(); }); +}; + +},{}],3:[function(require,module,exports){ + +},{}],4:[function(require,module,exports){ +(function (process){ +// .dirname, .basename, and .extname methods are extracted from Node.js v8.11.1, +// backported and transplited with Babel, with backwards-compat fixes + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// resolves . and .. elements in a path array with directory names there +// must be no slashes, empty elements, or device names (c:\) in the array +// (so also no leading and trailing slashes - it does not distinguish +// relative and absolute paths) +function normalizeArray(parts, allowAboveRoot) { + // if the path tries to go above the root, `up` ends up > 0 + var up = 0; + for (var i = parts.length - 1; i >= 0; i--) { + var last = parts[i]; + if (last === '.') { + parts.splice(i, 1); + } else if (last === '..') { + parts.splice(i, 1); + up++; + } else if (up) { + parts.splice(i, 1); + up--; + } + } + + // if the path is allowed to go above the root, restore leading ..s + if (allowAboveRoot) { + for (; up--; up) { + parts.unshift('..'); + } + } + + return parts; +} + +// path.resolve([from ...], to) +// posix version +exports.resolve = function() { + var resolvedPath = '', + resolvedAbsolute = false; + + for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { + var path = (i >= 0) ? arguments[i] : process.cwd(); + + // Skip empty and invalid entries + if (typeof path !== 'string') { + throw new TypeError('Arguments to path.resolve must be strings'); + } else if (!path) { + continue; + } + + resolvedPath = path + '/' + resolvedPath; + resolvedAbsolute = path.charAt(0) === '/'; + } + + // At this point the path should be resolved to a full absolute path, but + // handle relative paths to be safe (might happen when process.cwd() fails) + + // Normalize the path + resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) { + return !!p; + }), !resolvedAbsolute).join('/'); + + return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.'; +}; + +// path.normalize(path) +// posix version +exports.normalize = function(path) { + var isAbsolute = exports.isAbsolute(path), + trailingSlash = substr(path, -1) === '/'; + + // Normalize the path + path = normalizeArray(filter(path.split('/'), function(p) { + return !!p; + }), !isAbsolute).join('/'); + + if (!path && !isAbsolute) { + path = '.'; + } + if (path && trailingSlash) { + path += '/'; + } + + return (isAbsolute ? 
'/' : '') + path; +}; + +// posix version +exports.isAbsolute = function(path) { + return path.charAt(0) === '/'; +}; + +// posix version +exports.join = function() { + var paths = Array.prototype.slice.call(arguments, 0); + return exports.normalize(filter(paths, function(p, index) { + if (typeof p !== 'string') { + throw new TypeError('Arguments to path.join must be strings'); + } + return p; + }).join('/')); +}; + + +// path.relative(from, to) +// posix version +exports.relative = function(from, to) { + from = exports.resolve(from).substr(1); + to = exports.resolve(to).substr(1); + + function trim(arr) { + var start = 0; + for (; start < arr.length; start++) { + if (arr[start] !== '') break; + } + + var end = arr.length - 1; + for (; end >= 0; end--) { + if (arr[end] !== '') break; + } + + if (start > end) return []; + return arr.slice(start, end - start + 1); + } + + var fromParts = trim(from.split('/')); + var toParts = trim(to.split('/')); + + var length = Math.min(fromParts.length, toParts.length); + var samePartsLength = length; + for (var i = 0; i < length; i++) { + if (fromParts[i] !== toParts[i]) { + samePartsLength = i; + break; + } + } + + var outputParts = []; + for (var i = samePartsLength; i < fromParts.length; i++) { + outputParts.push('..'); + } + + outputParts = outputParts.concat(toParts.slice(samePartsLength)); + + return outputParts.join('/'); +}; + +exports.sep = '/'; +exports.delimiter = ':'; + +exports.dirname = function (path) { + if (typeof path !== 'string') path = path + ''; + if (path.length === 0) return '.'; + var code = path.charCodeAt(0); + var hasRoot = code === 47 /*/*/; + var end = -1; + var matchedSlash = true; + for (var i = path.length - 1; i >= 1; --i) { + code = path.charCodeAt(i); + if (code === 47 /*/*/) { + if (!matchedSlash) { + end = i; + break; + } + } else { + // We saw the first non-path separator + matchedSlash = false; + } + } + + if (end === -1) return hasRoot ? 
'/' : '.'; + if (hasRoot && end === 1) { + // return '//'; + // Backwards-compat fix: + return '/'; + } + return path.slice(0, end); +}; + +function basename(path) { + if (typeof path !== 'string') path = path + ''; + + var start = 0; + var end = -1; + var matchedSlash = true; + var i; + + for (i = path.length - 1; i >= 0; --i) { + if (path.charCodeAt(i) === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + start = i + 1; + break; + } + } else if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // path component + matchedSlash = false; + end = i + 1; + } + } + + if (end === -1) return ''; + return path.slice(start, end); +} + +// Uses a mixed approach for backwards-compatibility, as ext behavior changed +// in new Node.js versions, so only basename() above is backported here +exports.basename = function (path, ext) { + var f = basename(path); + if (ext && f.substr(-1 * ext.length) === ext) { + f = f.substr(0, f.length - ext.length); + } + return f; +}; + +exports.extname = function (path) { + if (typeof path !== 'string') path = path + ''; + var startDot = -1; + var startPart = 0; + var end = -1; + var matchedSlash = true; + // Track the state of characters (if any) we see before our first dot and + // after any path separator we find + var preDotState = 0; + for (var i = path.length - 1; i >= 0; --i) { + var code = path.charCodeAt(i); + if (code === 47 /*/*/) { + // If we reached a path separator that was not part of a set of path + // separators at the end of the string, stop now + if (!matchedSlash) { + startPart = i + 1; + break; + } + continue; + } + if (end === -1) { + // We saw the first non-path separator, mark this as the end of our + // extension + matchedSlash = false; + end = i + 1; + } + if (code === 46 /*.*/) { + // If this is our first dot, mark it as the start of our extension + if (startDot === -1) + startDot = i; + else if (preDotState !== 1) + preDotState = 1; + } else if (startDot !== -1) { + // We saw a non-dot and non-path separator before our dot, so we should + // have a good chance at having a non-empty extension + preDotState = -1; + } + } + + if (startDot === -1 || end === -1 || + // We saw a non-dot character immediately before the dot + preDotState === 0 || + // The (right-most) trimmed path component is exactly '..' + preDotState === 1 && startDot === end - 1 && startDot === startPart + 1) { + return ''; + } + return path.slice(startDot, end); +}; + +function filter (xs, f) { + if (xs.filter) return xs.filter(f); + var res = []; + for (var i = 0; i < xs.length; i++) { + if (f(xs[i], i, xs)) res.push(xs[i]); + } + return res; +} + +// String.prototype.substr - negative index don't work in IE8 +var substr = 'ab'.substr(-1) === 'b' + ? function (str, start, len) { return str.substr(start, len) } + : function (str, start, len) { + if (start < 0) start = str.length + start; + return str.substr(start, len); + } +; + +}).call(this,require('_process')) +},{"_process":5}],5:[function(require,module,exports){ +// shim for using process in browser +var process = module.exports = {}; + +// cached from whatever global is present so that test runners that stub it +// don't break things. But we need to wrap it in a try catch in case it is +// wrapped in strict mode code which doesn't define any globals. It's inside a +// function because try/catches deoptimize in certain engines. 
+ +var cachedSetTimeout; +var cachedClearTimeout; + +function defaultSetTimout() { + throw new Error('setTimeout has not been defined'); +} +function defaultClearTimeout () { + throw new Error('clearTimeout has not been defined'); +} +(function () { + try { + if (typeof setTimeout === 'function') { + cachedSetTimeout = setTimeout; + } else { + cachedSetTimeout = defaultSetTimout; + } + } catch (e) { + cachedSetTimeout = defaultSetTimout; + } + try { + if (typeof clearTimeout === 'function') { + cachedClearTimeout = clearTimeout; + } else { + cachedClearTimeout = defaultClearTimeout; + } + } catch (e) { + cachedClearTimeout = defaultClearTimeout; + } +} ()) +function runTimeout(fun) { + if (cachedSetTimeout === setTimeout) { + //normal enviroments in sane situations + return setTimeout(fun, 0); + } + // if setTimeout wasn't available but was latter defined + if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { + cachedSetTimeout = setTimeout; + return setTimeout(fun, 0); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedSetTimeout(fun, 0); + } catch(e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedSetTimeout.call(null, fun, 0); + } catch(e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error + return cachedSetTimeout.call(this, fun, 0); + } + } + + +} +function runClearTimeout(marker) { + if (cachedClearTimeout === clearTimeout) { + //normal enviroments in sane situations + return clearTimeout(marker); + } + // if clearTimeout wasn't available but was latter defined + if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { + cachedClearTimeout = clearTimeout; + return clearTimeout(marker); + } + try { + // when when somebody has screwed with setTimeout but no I.E. maddness + return cachedClearTimeout(marker); + } catch (e){ + try { + // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally + return cachedClearTimeout.call(null, marker); + } catch (e){ + // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. + // Some versions of I.E. 
have different rules for clearTimeout vs setTimeout + return cachedClearTimeout.call(this, marker); + } + } + + + +} +var queue = []; +var draining = false; +var currentQueue; +var queueIndex = -1; + +function cleanUpNextTick() { + if (!draining || !currentQueue) { + return; + } + draining = false; + if (currentQueue.length) { + queue = currentQueue.concat(queue); + } else { + queueIndex = -1; + } + if (queue.length) { + drainQueue(); + } +} + +function drainQueue() { + if (draining) { + return; + } + var timeout = runTimeout(cleanUpNextTick); + draining = true; + + var len = queue.length; + while(len) { + currentQueue = queue; + queue = []; + while (++queueIndex < len) { + if (currentQueue) { + currentQueue[queueIndex].run(); + } + } + queueIndex = -1; + len = queue.length; + } + currentQueue = null; + draining = false; + runClearTimeout(timeout); +} + +process.nextTick = function (fun) { + var args = new Array(arguments.length - 1); + if (arguments.length > 1) { + for (var i = 1; i < arguments.length; i++) { + args[i - 1] = arguments[i]; + } + } + queue.push(new Item(fun, args)); + if (queue.length === 1 && !draining) { + runTimeout(drainQueue); + } +}; + +// v8 likes predictible objects +function Item(fun, array) { + this.fun = fun; + this.array = array; +} +Item.prototype.run = function () { + this.fun.apply(null, this.array); +}; +process.title = 'browser'; +process.browser = true; +process.env = {}; +process.argv = []; +process.version = ''; // empty string to avoid regexp issues +process.versions = {}; + +function noop() {} + +process.on = noop; +process.addListener = noop; +process.once = noop; +process.off = noop; +process.removeListener = noop; +process.removeAllListeners = noop; +process.emit = noop; +process.prependListener = noop; +process.prependOnceListener = noop; + +process.listeners = function (name) { return [] } + +process.binding = function (name) { + throw new Error('process.binding is not supported'); +}; + +process.cwd = function () { return '/' }; +process.chdir = function (dir) { + throw new Error('process.chdir is not supported'); +}; +process.umask = function() { return 0; }; + +},{}],6:[function(require,module,exports){ +module.exports={ + "name": "ejs", + "description": "Embedded JavaScript templates", + "keywords": [ + "template", + "engine", + "ejs" + ], + "version": "3.1.6", + "author": "Matthew Eernisse <mde@fleegix.org> (http://fleegix.org)", + "license": "Apache-2.0", + "bin": { + "ejs": "./bin/cli.js" + }, + "main": "./lib/ejs.js", + "jsdelivr": "ejs.min.js", + "unpkg": "ejs.min.js", + "repository": { + "type": "git", + "url": "git://github.com/mde/ejs.git" + }, + "bugs": "https://github.com/mde/ejs/issues", + "homepage": "https://github.com/mde/ejs", + "dependencies": { + "jake": "^10.6.1" + }, + "devDependencies": { + "browserify": "^16.5.1", + "eslint": "^6.8.0", + "git-directory-deploy": "^1.5.1", + "jsdoc": "^3.6.4", + "lru-cache": "^4.0.1", + "mocha": "^7.1.1", + "uglify-js": "^3.3.16" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + } +} + +},{}]},{},[1])(1) +}); diff --git a/node_modules/ejs/ejs.min.js b/node_modules/ejs/ejs.min.js new file mode 100644 index 0000000..e40628c --- /dev/null +++ b/node_modules/ejs/ejs.min.js @@ -0,0 +1 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof 
self!=="undefined"){g=self}else{g=this}g.ejs=f()}})(function(){var define,module,exports;return function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r}()({1:[function(require,module,exports){"use strict";var fs=require("fs");var path=require("path");var utils=require("./utils");var scopeOptionWarned=false;var _VERSION_STRING=require("../package.json").version;var _DEFAULT_OPEN_DELIMITER="<";var _DEFAULT_CLOSE_DELIMITER=">";var _DEFAULT_DELIMITER="%";var _DEFAULT_LOCALS_NAME="locals";var _NAME="ejs";var _REGEX_STRING="(<%%|%%>|<%=|<%-|<%_|<%#|<%|%>|-%>|_%>)";var _OPTS_PASSABLE_WITH_DATA=["delimiter","scope","context","debug","compileDebug","client","_with","rmWhitespace","strict","filename","async"];var _OPTS_PASSABLE_WITH_DATA_EXPRESS=_OPTS_PASSABLE_WITH_DATA.concat("cache");var _BOM=/^\uFEFF/;exports.cache=utils.cache;exports.fileLoader=fs.readFileSync;exports.localsName=_DEFAULT_LOCALS_NAME;exports.promiseImpl=new Function("return this;")().Promise;exports.resolveInclude=function(name,filename,isDir){var dirname=path.dirname;var extname=path.extname;var resolve=path.resolve;var includePath=resolve(isDir?filename:dirname(filename),name);var ext=extname(name);if(!ext){includePath+=".ejs"}return includePath};function resolvePaths(name,paths){var filePath;if(paths.some(function(v){filePath=exports.resolveInclude(name,v,true);return fs.existsSync(filePath)})){return filePath}}function getIncludePath(path,options){var includePath;var filePath;var views=options.views;var match=/^[A-Za-z]+:\\|^\//.exec(path);if(match&&match.length){path=path.replace(/^\/*/,"");if(Array.isArray(options.root)){includePath=resolvePaths(path,options.root)}else{includePath=exports.resolveInclude(path,options.root||"/",true)}}else{if(options.filename){filePath=exports.resolveInclude(path,options.filename);if(fs.existsSync(filePath)){includePath=filePath}}if(!includePath&&Array.isArray(views)){includePath=resolvePaths(path,views)}if(!includePath&&typeof options.includer!=="function"){throw new Error('Could not find the include file "'+options.escapeFunction(path)+'"')}}return includePath}function handleCache(options,template){var func;var filename=options.filename;var hasTemplate=arguments.length>1;if(options.cache){if(!filename){throw new Error("cache option requires a filename")}func=exports.cache.get(filename);if(func){return func}if(!hasTemplate){template=fileLoader(filename).toString().replace(_BOM,"")}}else if(!hasTemplate){if(!filename){throw new Error("Internal EJS error: no file name or template "+"provided")}template=fileLoader(filename).toString().replace(_BOM,"")}func=exports.compile(template,options);if(options.cache){exports.cache.set(filename,func)}return func}function tryHandleCache(options,data,cb){var result;if(!cb){if(typeof exports.promiseImpl=="function"){return new exports.promiseImpl(function(resolve,reject){try{result=handleCache(options)(data);resolve(result)}catch(err){reject(err)}})}else{throw new Error("Please provide a callback function")}}else{try{result=handleCache(options)(data)}catch(err){return cb(err)}cb(null,result)}}function fileLoader(filePath){return exports.fileLoader(filePath)}function 
includeFile(path,options){var opts=utils.shallowCopy({},options);opts.filename=getIncludePath(path,opts);if(typeof options.includer==="function"){var includerResult=options.includer(path,opts.filename);if(includerResult){if(includerResult.filename){opts.filename=includerResult.filename}if(includerResult.template){return handleCache(opts,includerResult.template)}}}return handleCache(opts)}function rethrow(err,str,flnm,lineno,esc){var lines=str.split("\n");var start=Math.max(lineno-3,0);var end=Math.min(lines.length,lineno+3);var filename=esc(flnm);var context=lines.slice(start,end).map(function(line,i){var curr=i+start+1;return(curr==lineno?" >> ":" ")+curr+"| "+line}).join("\n");err.path=filename;err.message=(filename||"ejs")+":"+lineno+"\n"+context+"\n\n"+err.message;throw err}function stripSemi(str){return str.replace(/;(\s*$)/,"$1")}exports.compile=function compile(template,opts){var templ;if(opts&&opts.scope){if(!scopeOptionWarned){console.warn("`scope` option is deprecated and will be removed in EJS 3");scopeOptionWarned=true}if(!opts.context){opts.context=opts.scope}delete opts.scope}templ=new Template(template,opts);return templ.compile()};exports.render=function(template,d,o){var data=d||{};var opts=o||{};if(arguments.length==2){utils.shallowCopyFromList(opts,data,_OPTS_PASSABLE_WITH_DATA)}return handleCache(opts,template)(data)};exports.renderFile=function(){var args=Array.prototype.slice.call(arguments);var filename=args.shift();var cb;var opts={filename:filename};var data;var viewOpts;if(typeof arguments[arguments.length-1]=="function"){cb=args.pop()}if(args.length){data=args.shift();if(args.length){utils.shallowCopy(opts,args.pop())}else{if(data.settings){if(data.settings.views){opts.views=data.settings.views}if(data.settings["view cache"]){opts.cache=true}viewOpts=data.settings["view options"];if(viewOpts){utils.shallowCopy(opts,viewOpts)}}utils.shallowCopyFromList(opts,data,_OPTS_PASSABLE_WITH_DATA_EXPRESS)}opts.filename=filename}else{data={}}return tryHandleCache(opts,data,cb)};exports.Template=Template;exports.clearCache=function(){exports.cache.reset()};function Template(text,opts){opts=opts||{};var options={};this.templateText=text;this.mode=null;this.truncate=false;this.currentLine=1;this.source="";options.client=opts.client||false;options.escapeFunction=opts.escape||opts.escapeFunction||utils.escapeXML;options.compileDebug=opts.compileDebug!==false;options.debug=!!opts.debug;options.filename=opts.filename;options.openDelimiter=opts.openDelimiter||exports.openDelimiter||_DEFAULT_OPEN_DELIMITER;options.closeDelimiter=opts.closeDelimiter||exports.closeDelimiter||_DEFAULT_CLOSE_DELIMITER;options.delimiter=opts.delimiter||exports.delimiter||_DEFAULT_DELIMITER;options.strict=opts.strict||false;options.context=opts.context;options.cache=opts.cache||false;options.rmWhitespace=opts.rmWhitespace;options.root=opts.root;options.includer=opts.includer;options.outputFunctionName=opts.outputFunctionName;options.localsName=opts.localsName||exports.localsName||_DEFAULT_LOCALS_NAME;options.views=opts.views;options.async=opts.async;options.destructuredLocals=opts.destructuredLocals;options.legacyInclude=typeof opts.legacyInclude!="undefined"?!!opts.legacyInclude:true;if(options.strict){options._with=false}else{options._with=typeof opts._with!="undefined"?opts._with:true}this.opts=options;this.regex=this.createRegex()}Template.modes={EVAL:"eval",ESCAPED:"escaped",RAW:"raw",COMMENT:"comment",LITERAL:"literal"};Template.prototype={createRegex:function(){var str=_REGEX_STRING;var 
delim=utils.escapeRegExpChars(this.opts.delimiter);var open=utils.escapeRegExpChars(this.opts.openDelimiter);var close=utils.escapeRegExpChars(this.opts.closeDelimiter);str=str.replace(/%/g,delim).replace(/</g,open).replace(/>/g,close);return new RegExp(str)},compile:function(){var src;var fn;var opts=this.opts;var prepended="";var appended="";var escapeFn=opts.escapeFunction;var ctor;var sanitizedFilename=opts.filename?JSON.stringify(opts.filename):"undefined";if(!this.source){this.generateSource();prepended+=' var __output = "";\n'+" function __append(s) { if (s !== undefined && s !== null) __output += s }\n";if(opts.outputFunctionName){prepended+=" var "+opts.outputFunctionName+" = __append;"+"\n"}if(opts.destructuredLocals&&opts.destructuredLocals.length){var destructuring=" var __locals = ("+opts.localsName+" || {}),\n";for(var i=0;i<opts.destructuredLocals.length;i++){var name=opts.destructuredLocals[i];if(i>0){destructuring+=",\n "}destructuring+=name+" = __locals."+name}prepended+=destructuring+";\n"}if(opts._with!==false){prepended+=" with ("+opts.localsName+" || {}) {"+"\n";appended+=" }"+"\n"}appended+=" return __output;"+"\n";this.source=prepended+this.source+appended}if(opts.compileDebug){src="var __line = 1"+"\n"+" , __lines = "+JSON.stringify(this.templateText)+"\n"+" , __filename = "+sanitizedFilename+";"+"\n"+"try {"+"\n"+this.source+"} catch (e) {"+"\n"+" rethrow(e, __lines, __filename, __line, escapeFn);"+"\n"+"}"+"\n"}else{src=this.source}if(opts.client){src="escapeFn = escapeFn || "+escapeFn.toString()+";"+"\n"+src;if(opts.compileDebug){src="rethrow = rethrow || "+rethrow.toString()+";"+"\n"+src}}if(opts.strict){src='"use strict";\n'+src}if(opts.debug){console.log(src)}if(opts.compileDebug&&opts.filename){src=src+"\n"+"//# sourceURL="+sanitizedFilename+"\n"}try{if(opts.async){try{ctor=new Function("return (async function(){}).constructor;")()}catch(e){if(e instanceof SyntaxError){throw new Error("This environment does not support async/await")}else{throw e}}}else{ctor=Function}fn=new ctor(opts.localsName+", escapeFn, include, rethrow",src)}catch(e){if(e instanceof SyntaxError){if(opts.filename){e.message+=" in "+opts.filename}e.message+=" while compiling ejs\n\n";e.message+="If the above error is not helpful, you may want to try EJS-Lint:\n";e.message+="https://github.com/RyanZim/EJS-Lint";if(!opts.async){e.message+="\n";e.message+="Or, if you meant to create an async function, pass `async: true` as an option."}}throw e}var returnedFn=opts.client?fn:function anonymous(data){var include=function(path,includeData){var d=utils.shallowCopy({},data);if(includeData){d=utils.shallowCopy(d,includeData)}return includeFile(path,opts)(d)};return fn.apply(opts.context,[data||{},escapeFn,include,rethrow])};if(opts.filename&&typeof Object.defineProperty==="function"){var filename=opts.filename;var basename=path.basename(filename,path.extname(filename));try{Object.defineProperty(returnedFn,"name",{value:basename,writable:false,enumerable:false,configurable:true})}catch(e){}}return returnedFn},generateSource:function(){var opts=this.opts;if(opts.rmWhitespace){this.templateText=this.templateText.replace(/[\r\n]+/g,"\n").replace(/^\s+|\s+$/gm,"")}this.templateText=this.templateText.replace(/[ \t]*<%_/gm,"<%_").replace(/_%>[ \t]*/gm,"_%>");var self=this;var matches=this.parseTemplateText();var d=this.opts.delimiter;var o=this.opts.openDelimiter;var c=this.opts.closeDelimiter;if(matches&&matches.length){matches.forEach(function(line,index){var 
closing;if(line.indexOf(o+d)===0&&line.indexOf(o+d+d)!==0){closing=matches[index+2];if(!(closing==d+c||closing=="-"+d+c||closing=="_"+d+c)){throw new Error('Could not find matching close tag for "'+line+'".')}}self.scanLine(line)})}},parseTemplateText:function(){var str=this.templateText;var pat=this.regex;var result=pat.exec(str);var arr=[];var firstPos;while(result){firstPos=result.index;if(firstPos!==0){arr.push(str.substring(0,firstPos));str=str.slice(firstPos)}arr.push(result[0]);str=str.slice(result[0].length);result=pat.exec(str)}if(str){arr.push(str)}return arr},_addOutput:function(line){if(this.truncate){line=line.replace(/^(?:\r\n|\r|\n)/,"");this.truncate=false}if(!line){return line}line=line.replace(/\\/g,"\\\\");line=line.replace(/\n/g,"\\n");line=line.replace(/\r/g,"\\r");line=line.replace(/"/g,'\\"');this.source+=' ; __append("'+line+'")'+"\n"},scanLine:function(line){var self=this;var d=this.opts.delimiter;var o=this.opts.openDelimiter;var c=this.opts.closeDelimiter;var newLineCount=0;newLineCount=line.split("\n").length-1;switch(line){case o+d:case o+d+"_":this.mode=Template.modes.EVAL;break;case o+d+"=":this.mode=Template.modes.ESCAPED;break;case o+d+"-":this.mode=Template.modes.RAW;break;case o+d+"#":this.mode=Template.modes.COMMENT;break;case o+d+d:this.mode=Template.modes.LITERAL;this.source+=' ; __append("'+line.replace(o+d+d,o+d)+'")'+"\n";break;case d+d+c:this.mode=Template.modes.LITERAL;this.source+=' ; __append("'+line.replace(d+d+c,d+c)+'")'+"\n";break;case d+c:case"-"+d+c:case"_"+d+c:if(this.mode==Template.modes.LITERAL){this._addOutput(line)}this.mode=null;this.truncate=line.indexOf("-")===0||line.indexOf("_")===0;break;default:if(this.mode){switch(this.mode){case Template.modes.EVAL:case Template.modes.ESCAPED:case Template.modes.RAW:if(line.lastIndexOf("//")>line.lastIndexOf("\n")){line+="\n"}}switch(this.mode){case Template.modes.EVAL:this.source+=" ; "+line+"\n";break;case Template.modes.ESCAPED:this.source+=" ; __append(escapeFn("+stripSemi(line)+"))"+"\n";break;case Template.modes.RAW:this.source+=" ; __append("+stripSemi(line)+")"+"\n";break;case Template.modes.COMMENT:break;case Template.modes.LITERAL:this._addOutput(line);break}}else{this._addOutput(line)}}if(self.opts.compileDebug&&newLineCount){this.currentLine+=newLineCount;this.source+=" ; __line = "+this.currentLine+"\n"}}};exports.escapeXML=utils.escapeXML;exports.__express=exports.renderFile;exports.VERSION=_VERSION_STRING;exports.name=_NAME;if(typeof window!="undefined"){window.ejs=exports}},{"../package.json":6,"./utils":2,fs:3,path:4}],2:[function(require,module,exports){"use strict";var regExpChars=/[|\\{}()[\]^$+*?.]/g;exports.escapeRegExpChars=function(string){if(!string){return""}return String(string).replace(regExpChars,"\\$&")};var _ENCODE_HTML_RULES={"&":"&","<":"<",">":">",'"':""","'":"'"};var _MATCH_HTML=/[&<>'"]/g;function encode_char(c){return _ENCODE_HTML_RULES[c]||c}var escapeFuncStr="var _ENCODE_HTML_RULES = {\n"+' "&": "&"\n'+' , "<": "<"\n'+' , ">": ">"\n'+' , \'"\': """\n'+' , "\'": "'"\n'+" }\n"+" , _MATCH_HTML = /[&<>'\"]/g;\n"+"function encode_char(c) {\n"+" return _ENCODE_HTML_RULES[c] || c;\n"+"};\n";exports.escapeXML=function(markup){return markup==undefined?"":String(markup).replace(_MATCH_HTML,encode_char)};exports.escapeXML.toString=function(){return Function.prototype.toString.call(this)+";\n"+escapeFuncStr};exports.shallowCopy=function(to,from){from=from||{};for(var p in from){to[p]=from[p]}return to};exports.shallowCopyFromList=function(to,from,list){for(var 
i=0;i<list.length;i++){var p=list[i];if(typeof from[p]!="undefined"){to[p]=from[p]}}return to};exports.cache={_data:{},set:function(key,val){this._data[key]=val},get:function(key){return this._data[key]},remove:function(key){delete this._data[key]},reset:function(){this._data={}}};exports.hyphenToCamel=function(str){return str.replace(/-[a-z]/g,function(match){return match[1].toUpperCase()})}},{}],3:[function(require,module,exports){},{}],4:[function(require,module,exports){(function(process){function normalizeArray(parts,allowAboveRoot){var up=0;for(var i=parts.length-1;i>=0;i--){var last=parts[i];if(last==="."){parts.splice(i,1)}else if(last===".."){parts.splice(i,1);up++}else if(up){parts.splice(i,1);up--}}if(allowAboveRoot){for(;up--;up){parts.unshift("..")}}return parts}exports.resolve=function(){var resolvedPath="",resolvedAbsolute=false;for(var i=arguments.length-1;i>=-1&&!resolvedAbsolute;i--){var path=i>=0?arguments[i]:process.cwd();if(typeof path!=="string"){throw new TypeError("Arguments to path.resolve must be strings")}else if(!path){continue}resolvedPath=path+"/"+resolvedPath;resolvedAbsolute=path.charAt(0)==="/"}resolvedPath=normalizeArray(filter(resolvedPath.split("/"),function(p){return!!p}),!resolvedAbsolute).join("/");return(resolvedAbsolute?"/":"")+resolvedPath||"."};exports.normalize=function(path){var isAbsolute=exports.isAbsolute(path),trailingSlash=substr(path,-1)==="/";path=normalizeArray(filter(path.split("/"),function(p){return!!p}),!isAbsolute).join("/");if(!path&&!isAbsolute){path="."}if(path&&trailingSlash){path+="/"}return(isAbsolute?"/":"")+path};exports.isAbsolute=function(path){return path.charAt(0)==="/"};exports.join=function(){var paths=Array.prototype.slice.call(arguments,0);return exports.normalize(filter(paths,function(p,index){if(typeof p!=="string"){throw new TypeError("Arguments to path.join must be strings")}return p}).join("/"))};exports.relative=function(from,to){from=exports.resolve(from).substr(1);to=exports.resolve(to).substr(1);function trim(arr){var start=0;for(;start<arr.length;start++){if(arr[start]!=="")break}var end=arr.length-1;for(;end>=0;end--){if(arr[end]!=="")break}if(start>end)return[];return arr.slice(start,end-start+1)}var fromParts=trim(from.split("/"));var toParts=trim(to.split("/"));var length=Math.min(fromParts.length,toParts.length);var samePartsLength=length;for(var i=0;i<length;i++){if(fromParts[i]!==toParts[i]){samePartsLength=i;break}}var outputParts=[];for(var i=samePartsLength;i<fromParts.length;i++){outputParts.push("..")}outputParts=outputParts.concat(toParts.slice(samePartsLength));return outputParts.join("/")};exports.sep="/";exports.delimiter=":";exports.dirname=function(path){if(typeof path!=="string")path=path+"";if(path.length===0)return".";var code=path.charCodeAt(0);var hasRoot=code===47;var end=-1;var matchedSlash=true;for(var i=path.length-1;i>=1;--i){code=path.charCodeAt(i);if(code===47){if(!matchedSlash){end=i;break}}else{matchedSlash=false}}if(end===-1)return hasRoot?"/":".";if(hasRoot&&end===1){return"/"}return path.slice(0,end)};function basename(path){if(typeof path!=="string")path=path+"";var start=0;var end=-1;var matchedSlash=true;var i;for(i=path.length-1;i>=0;--i){if(path.charCodeAt(i)===47){if(!matchedSlash){start=i+1;break}}else if(end===-1){matchedSlash=false;end=i+1}}if(end===-1)return"";return path.slice(start,end)}exports.basename=function(path,ext){var f=basename(path);if(ext&&f.substr(-1*ext.length)===ext){f=f.substr(0,f.length-ext.length)}return 
f};exports.extname=function(path){if(typeof path!=="string")path=path+"";var startDot=-1;var startPart=0;var end=-1;var matchedSlash=true;var preDotState=0;for(var i=path.length-1;i>=0;--i){var code=path.charCodeAt(i);if(code===47){if(!matchedSlash){startPart=i+1;break}continue}if(end===-1){matchedSlash=false;end=i+1}if(code===46){if(startDot===-1)startDot=i;else if(preDotState!==1)preDotState=1}else if(startDot!==-1){preDotState=-1}}if(startDot===-1||end===-1||preDotState===0||preDotState===1&&startDot===end-1&&startDot===startPart+1){return""}return path.slice(startDot,end)};function filter(xs,f){if(xs.filter)return xs.filter(f);var res=[];for(var i=0;i<xs.length;i++){if(f(xs[i],i,xs))res.push(xs[i])}return res}var substr="ab".substr(-1)==="b"?function(str,start,len){return str.substr(start,len)}:function(str,start,len){if(start<0)start=str.length+start;return str.substr(start,len)}}).call(this,require("_process"))},{_process:5}],5:[function(require,module,exports){var process=module.exports={};var cachedSetTimeout;var cachedClearTimeout;function defaultSetTimout(){throw new Error("setTimeout has not been defined")}function defaultClearTimeout(){throw new Error("clearTimeout has not been defined")}(function(){try{if(typeof setTimeout==="function"){cachedSetTimeout=setTimeout}else{cachedSetTimeout=defaultSetTimout}}catch(e){cachedSetTimeout=defaultSetTimout}try{if(typeof clearTimeout==="function"){cachedClearTimeout=clearTimeout}else{cachedClearTimeout=defaultClearTimeout}}catch(e){cachedClearTimeout=defaultClearTimeout}})();function runTimeout(fun){if(cachedSetTimeout===setTimeout){return setTimeout(fun,0)}if((cachedSetTimeout===defaultSetTimout||!cachedSetTimeout)&&setTimeout){cachedSetTimeout=setTimeout;return setTimeout(fun,0)}try{return cachedSetTimeout(fun,0)}catch(e){try{return cachedSetTimeout.call(null,fun,0)}catch(e){return cachedSetTimeout.call(this,fun,0)}}}function runClearTimeout(marker){if(cachedClearTimeout===clearTimeout){return clearTimeout(marker)}if((cachedClearTimeout===defaultClearTimeout||!cachedClearTimeout)&&clearTimeout){cachedClearTimeout=clearTimeout;return clearTimeout(marker)}try{return cachedClearTimeout(marker)}catch(e){try{return cachedClearTimeout.call(null,marker)}catch(e){return cachedClearTimeout.call(this,marker)}}}var queue=[];var draining=false;var currentQueue;var queueIndex=-1;function cleanUpNextTick(){if(!draining||!currentQueue){return}draining=false;if(currentQueue.length){queue=currentQueue.concat(queue)}else{queueIndex=-1}if(queue.length){drainQueue()}}function drainQueue(){if(draining){return}var timeout=runTimeout(cleanUpNextTick);draining=true;var len=queue.length;while(len){currentQueue=queue;queue=[];while(++queueIndex<len){if(currentQueue){currentQueue[queueIndex].run()}}queueIndex=-1;len=queue.length}currentQueue=null;draining=false;runClearTimeout(timeout)}process.nextTick=function(fun){var args=new Array(arguments.length-1);if(arguments.length>1){for(var i=1;i<arguments.length;i++){args[i-1]=arguments[i]}}queue.push(new Item(fun,args));if(queue.length===1&&!draining){runTimeout(drainQueue)}};function Item(fun,array){this.fun=fun;this.array=array}Item.prototype.run=function(){this.fun.apply(null,this.array)};process.title="browser";process.browser=true;process.env={};process.argv=[];process.version="";process.versions={};function 
noop(){}process.on=noop;process.addListener=noop;process.once=noop;process.off=noop;process.removeListener=noop;process.removeAllListeners=noop;process.emit=noop;process.prependListener=noop;process.prependOnceListener=noop;process.listeners=function(name){return[]};process.binding=function(name){throw new Error("process.binding is not supported")};process.cwd=function(){return"/"};process.chdir=function(dir){throw new Error("process.chdir is not supported")};process.umask=function(){return 0}},{}],6:[function(require,module,exports){module.exports={name:"ejs",description:"Embedded JavaScript templates",keywords:["template","engine","ejs"],version:"3.1.6",author:"Matthew Eernisse <mde@fleegix.org> (http://fleegix.org)",license:"Apache-2.0",bin:{ejs:"./bin/cli.js"},main:"./lib/ejs.js",jsdelivr:"ejs.min.js",unpkg:"ejs.min.js",repository:{type:"git",url:"git://github.com/mde/ejs.git"},bugs:"https://github.com/mde/ejs/issues",homepage:"https://github.com/mde/ejs",dependencies:{jake:"^10.6.1"},devDependencies:{browserify:"^16.5.1",eslint:"^6.8.0","git-directory-deploy":"^1.5.1",jsdoc:"^3.6.4","lru-cache":"^4.0.1",mocha:"^7.1.1","uglify-js":"^3.3.16"},engines:{node:">=0.10.0"},scripts:{test:"mocha"}}},{}]},{},[1])(1)}); diff --git a/node_modules/ejs/jakefile.js b/node_modules/ejs/jakefile.js new file mode 100644 index 0000000..3953160 --- /dev/null +++ b/node_modules/ejs/jakefile.js @@ -0,0 +1,81 @@ +var fs = require('fs'); +var execSync = require('child_process').execSync; +var exec = function (cmd) { + execSync(cmd, {stdio: 'inherit'}); +}; + +/* global jake, task, desc, publishTask */ + +task('build', ['lint', 'clean', 'browserify', 'minify'], function () { + console.log('Build completed.'); +}); + +desc('Cleans browerified/minified files and package files'); +task('clean', ['clobber'], function () { + jake.rmRf('./ejs.js'); + jake.rmRf('./ejs.min.js'); + console.log('Cleaned up compiled files.'); +}); + +desc('Lints the source code'); +task('lint', ['clean'], function () { + exec('./node_modules/.bin/eslint "**/*.js"'); + console.log('Linting completed.'); +}); + +task('browserify', function () { + exec('./node_modules/browserify/bin/cmd.js --standalone ejs lib/ejs.js > ejs.js'); + console.log('Browserification completed.'); +}); + +task('minify', function () { + exec('./node_modules/uglify-js/bin/uglifyjs ejs.js > ejs.min.js'); + console.log('Minification completed.'); +}); + +desc('Generates the EJS API docs for the public API'); +task('doc', function () { + jake.rmRf('out'); + exec('./node_modules/.bin/jsdoc --verbose -c jsdoc.json lib/* docs/jsdoc/*'); + console.log('Documentation generated in ./out.'); +}); + +desc('Generates the EJS API docs for the public and private API'); +task('devdoc', function () { + jake.rmRf('out'); + exec('./node_modules/.bin/jsdoc --verbose -p -c jsdoc.json lib/* docs/jsdoc/*'); + console.log('Documentation generated in ./out.'); +}); + +desc('Publishes the EJS API docs'); +task('docPublish', ['doc'], function () { + fs.writeFileSync('out/CNAME', 'api.ejs.co'); + console.log('Pushing docs to gh-pages...'); + exec('./node_modules/.bin/git-directory-deploy --directory out/'); + console.log('Docs published to gh-pages.'); +}); + +desc('Runs the EJS test suite'); +task('test', ['lint'], function () { + exec('./node_modules/.bin/mocha'); +}); + +publishTask('ejs', ['build'], function () { + this.packageFiles.include([ + 'jakefile.js', + 'README.md', + 'LICENSE', + 'package.json', + 'ejs.js', + 'ejs.min.js', + 'lib/**', + 'bin/**', + 'usage.txt' + ]); +}); + 
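// Editor's sketch (assumption, not part of the upstream jakefile): the 'publish' hook that
// follows shows jake's task event API (jake.Task.<name>.on / .invoke); an analogous hook
// could re-run the docs task after every successful build.
jake.Task.build.on('complete', function () {
  jake.Task.doc.invoke();
});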
+jake.Task.publish.on('complete', function () { + console.log('Updating hosted docs...'); + console.log('If this fails, run jake docPublish to re-try.'); + jake.Task.docPublish.invoke(); +}); diff --git a/node_modules/ejs/lib/ejs.js b/node_modules/ejs/lib/ejs.js new file mode 100644 index 0000000..aa6322e --- /dev/null +++ b/node_modules/ejs/lib/ejs.js @@ -0,0 +1,939 @@ +/* + * EJS Embedded JavaScript templates + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +'use strict'; + +/** + * @file Embedded JavaScript templating engine. {@link http://ejs.co} + * @author Matthew Eernisse <mde@fleegix.org> + * @author Tiancheng "Timothy" Gu <timothygu99@gmail.com> + * @project EJS + * @license {@link http://www.apache.org/licenses/LICENSE-2.0 Apache License, Version 2.0} + */ + +/** + * EJS internal functions. + * + * Technically this "module" lies in the same file as {@link module:ejs}, for + * the sake of organization all the private functions re grouped into this + * module. + * + * @module ejs-internal + * @private + */ + +/** + * Embedded JavaScript templating engine. + * + * @module ejs + * @public + */ + +var fs = require('fs'); +var path = require('path'); +var utils = require('./utils'); + +var scopeOptionWarned = false; +/** @type {string} */ +var _VERSION_STRING = require('../package.json').version; +var _DEFAULT_OPEN_DELIMITER = '<'; +var _DEFAULT_CLOSE_DELIMITER = '>'; +var _DEFAULT_DELIMITER = '%'; +var _DEFAULT_LOCALS_NAME = 'locals'; +var _NAME = 'ejs'; +var _REGEX_STRING = '(<%%|%%>|<%=|<%-|<%_|<%#|<%|%>|-%>|_%>)'; +var _OPTS_PASSABLE_WITH_DATA = ['delimiter', 'scope', 'context', 'debug', 'compileDebug', + 'client', '_with', 'rmWhitespace', 'strict', 'filename', 'async']; +// We don't allow 'cache' option to be passed in the data obj for +// the normal `render` call, but this is where Express 2 & 3 put it +// so we make an exception for `renderFile` +var _OPTS_PASSABLE_WITH_DATA_EXPRESS = _OPTS_PASSABLE_WITH_DATA.concat('cache'); +var _BOM = /^\uFEFF/; + +/** + * EJS template function cache. This can be a LRU object from lru-cache NPM + * module. By default, it is {@link module:utils.cache}, a simple in-process + * cache that grows continuously. + * + * @type {Cache} + */ + +exports.cache = utils.cache; + +/** + * Custom file loader. Useful for template preprocessing or restricting access + * to a certain part of the filesystem. + * + * @type {fileLoader} + */ + +exports.fileLoader = fs.readFileSync; + +/** + * Name of the object containing the locals. + * + * This variable is overridden by {@link Options}`.localsName` if it is not + * `undefined`. 
+ * + * @type {String} + * @public + */ + +exports.localsName = _DEFAULT_LOCALS_NAME; + +/** + * Promise implementation -- defaults to the native implementation if available + * This is mostly just for testability + * + * @type {PromiseConstructorLike} + * @public + */ + +exports.promiseImpl = (new Function('return this;'))().Promise; + +/** + * Get the path to the included file from the parent file path and the + * specified path. + * + * @param {String} name specified path + * @param {String} filename parent file path + * @param {Boolean} [isDir=false] whether the parent file path is a directory + * @return {String} + */ +exports.resolveInclude = function(name, filename, isDir) { + var dirname = path.dirname; + var extname = path.extname; + var resolve = path.resolve; + var includePath = resolve(isDir ? filename : dirname(filename), name); + var ext = extname(name); + if (!ext) { + includePath += '.ejs'; + } + return includePath; +}; + +/** + * Try to resolve file path on multiple directories + * + * @param {String} name specified path + * @param {Array<String>} paths list of possible parent directory paths + * @return {String} + */ +function resolvePaths(name, paths) { + var filePath; + if (paths.some(function (v) { + filePath = exports.resolveInclude(name, v, true); + return fs.existsSync(filePath); + })) { + return filePath; + } +} + +/** + * Get the path to the included file by Options + * + * @param {String} path specified path + * @param {Options} options compilation options + * @return {String} + */ +function getIncludePath(path, options) { + var includePath; + var filePath; + var views = options.views; + var match = /^[A-Za-z]+:\\|^\//.exec(path); + + // Abs path + if (match && match.length) { + path = path.replace(/^\/*/, ''); + if (Array.isArray(options.root)) { + includePath = resolvePaths(path, options.root); + } else { + includePath = exports.resolveInclude(path, options.root || '/', true); + } + } + // Relative paths + else { + // Look relative to a passed filename first + if (options.filename) { + filePath = exports.resolveInclude(path, options.filename); + if (fs.existsSync(filePath)) { + includePath = filePath; + } + } + // Then look in any views directories + if (!includePath && Array.isArray(views)) { + includePath = resolvePaths(path, views); + } + if (!includePath && typeof options.includer !== 'function') { + throw new Error('Could not find the include file "' + + options.escapeFunction(path) + '"'); + } + } + return includePath; +} + +/** + * Get the template from a string or a file, either compiled on-the-fly or + * read from cache (if enabled), and cache the template if needed. + * + * If `template` is not set, the file specified in `options.filename` will be + * read. + * + * If `options.cache` is true, this function reads the file from + * `options.filename` so it must be set prior to calling this function. + * + * @memberof module:ejs-internal + * @param {Options} options compilation options + * @param {String} [template] template source + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `options.client`, either type might be returned. 
+ * @static + */ + +function handleCache(options, template) { + var func; + var filename = options.filename; + var hasTemplate = arguments.length > 1; + + if (options.cache) { + if (!filename) { + throw new Error('cache option requires a filename'); + } + func = exports.cache.get(filename); + if (func) { + return func; + } + if (!hasTemplate) { + template = fileLoader(filename).toString().replace(_BOM, ''); + } + } + else if (!hasTemplate) { + // istanbul ignore if: should not happen at all + if (!filename) { + throw new Error('Internal EJS error: no file name or template ' + + 'provided'); + } + template = fileLoader(filename).toString().replace(_BOM, ''); + } + func = exports.compile(template, options); + if (options.cache) { + exports.cache.set(filename, func); + } + return func; +} + +/** + * Try calling handleCache with the given options and data and call the + * callback with the result. If an error occurs, call the callback with + * the error. Used by renderFile(). + * + * @memberof module:ejs-internal + * @param {Options} options compilation options + * @param {Object} data template data + * @param {RenderFileCallback} cb callback + * @static + */ + +function tryHandleCache(options, data, cb) { + var result; + if (!cb) { + if (typeof exports.promiseImpl == 'function') { + return new exports.promiseImpl(function (resolve, reject) { + try { + result = handleCache(options)(data); + resolve(result); + } + catch (err) { + reject(err); + } + }); + } + else { + throw new Error('Please provide a callback function'); + } + } + else { + try { + result = handleCache(options)(data); + } + catch (err) { + return cb(err); + } + + cb(null, result); + } +} + +/** + * fileLoader is independent + * + * @param {String} filePath ejs file path. + * @return {String} The contents of the specified file. + * @static + */ + +function fileLoader(filePath){ + return exports.fileLoader(filePath); +} + +/** + * Get the template function. + * + * If `options.cache` is `true`, then the template is cached. + * + * @memberof module:ejs-internal + * @param {String} path path for the specified file + * @param {Options} options compilation options + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `options.client`, either type might be returned + * @static + */ + +function includeFile(path, options) { + var opts = utils.shallowCopy({}, options); + opts.filename = getIncludePath(path, opts); + if (typeof options.includer === 'function') { + var includerResult = options.includer(path, opts.filename); + if (includerResult) { + if (includerResult.filename) { + opts.filename = includerResult.filename; + } + if (includerResult.template) { + return handleCache(opts, includerResult.template); + } + } + } + return handleCache(opts); +} + +/** + * Re-throw the given `err` in context to the `str` of ejs, `filename`, and + * `lineno`. + * + * @implements {RethrowCallback} + * @memberof module:ejs-internal + * @param {Error} err Error object + * @param {String} str EJS source + * @param {String} flnm file name of the EJS file + * @param {Number} lineno line number of the error + * @param {EscapeCallback} esc + * @static + */ + +function rethrow(err, str, flnm, lineno, esc) { + var lines = str.split('\n'); + var start = Math.max(lineno - 3, 0); + var end = Math.min(lines.length, lineno + 3); + var filename = esc(flnm); + // Error context + var context = lines.slice(start, end).map(function (line, i){ + var curr = i + start + 1; + return (curr == lineno ? 
' >> ' : ' ') + + curr + + '| ' + + line; + }).join('\n'); + + // Alter exception message + err.path = filename; + err.message = (filename || 'ejs') + ':' + + lineno + '\n' + + context + '\n\n' + + err.message; + + throw err; +} + +function stripSemi(str){ + return str.replace(/;(\s*$)/, '$1'); +} + +/** + * Compile the given `str` of ejs into a template function. + * + * @param {String} template EJS template + * + * @param {Options} [opts] compilation options + * + * @return {(TemplateFunction|ClientFunction)} + * Depending on the value of `opts.client`, either type might be returned. + * Note that the return type of the function also depends on the value of `opts.async`. + * @public + */ + +exports.compile = function compile(template, opts) { + var templ; + + // v1 compat + // 'scope' is 'context' + // FIXME: Remove this in a future version + if (opts && opts.scope) { + if (!scopeOptionWarned){ + console.warn('`scope` option is deprecated and will be removed in EJS 3'); + scopeOptionWarned = true; + } + if (!opts.context) { + opts.context = opts.scope; + } + delete opts.scope; + } + templ = new Template(template, opts); + return templ.compile(); +}; + +/** + * Render the given `template` of ejs. + * + * If you would like to include options but not data, you need to explicitly + * call this function with `data` being an empty object or `null`. + * + * @param {String} template EJS template + * @param {Object} [data={}] template data + * @param {Options} [opts={}] compilation and rendering options + * @return {(String|Promise<String>)} + * Return value type depends on `opts.async`. + * @public + */ + +exports.render = function (template, d, o) { + var data = d || {}; + var opts = o || {}; + + // No options object -- if there are optiony names + // in the data, copy them to options + if (arguments.length == 2) { + utils.shallowCopyFromList(opts, data, _OPTS_PASSABLE_WITH_DATA); + } + + return handleCache(opts, template)(data); +}; + +/** + * Render an EJS file at the given `path` and callback `cb(err, str)`. + * + * If you would like to include options but not data, you need to explicitly + * call this function with `data` being an empty object or `null`. + * + * @param {String} path path to the EJS file + * @param {Object} [data={}] template data + * @param {Options} [opts={}] compilation and rendering options + * @param {RenderFileCallback} cb callback + * @public + */ + +exports.renderFile = function () { + var args = Array.prototype.slice.call(arguments); + var filename = args.shift(); + var cb; + var opts = {filename: filename}; + var data; + var viewOpts; + + // Do we have a callback? + if (typeof arguments[arguments.length - 1] == 'function') { + cb = args.pop(); + } + // Do we have data/opts? + if (args.length) { + // Should always have data obj + data = args.shift(); + // Normal passed opts (data obj + opts obj) + if (args.length) { + // Use shallowCopy so we don't pollute passed in opts obj with new vals + utils.shallowCopy(opts, args.pop()); + } + // Special casing for Express (settings + opts-in-data) + else { + // Express 3 and 4 + if (data.settings) { + // Pull a few things from known locations + if (data.settings.views) { + opts.views = data.settings.views; + } + if (data.settings['view cache']) { + opts.cache = true; + } + // Undocumented after Express 2, but still usable, esp. 
for + // items that are unsafe to be passed along with data, like `root` + viewOpts = data.settings['view options']; + if (viewOpts) { + utils.shallowCopy(opts, viewOpts); + } + } + // Express 2 and lower, values set in app.locals, or people who just + // want to pass options in their data. NOTE: These values will override + // anything previously set in settings or settings['view options'] + utils.shallowCopyFromList(opts, data, _OPTS_PASSABLE_WITH_DATA_EXPRESS); + } + opts.filename = filename; + } + else { + data = {}; + } + + return tryHandleCache(opts, data, cb); +}; + +/** + * Clear intermediate JavaScript cache. Calls {@link Cache#reset}. + * @public + */ + +/** + * EJS template class + * @public + */ +exports.Template = Template; + +exports.clearCache = function () { + exports.cache.reset(); +}; + +function Template(text, opts) { + opts = opts || {}; + var options = {}; + this.templateText = text; + /** @type {string | null} */ + this.mode = null; + this.truncate = false; + this.currentLine = 1; + this.source = ''; + options.client = opts.client || false; + options.escapeFunction = opts.escape || opts.escapeFunction || utils.escapeXML; + options.compileDebug = opts.compileDebug !== false; + options.debug = !!opts.debug; + options.filename = opts.filename; + options.openDelimiter = opts.openDelimiter || exports.openDelimiter || _DEFAULT_OPEN_DELIMITER; + options.closeDelimiter = opts.closeDelimiter || exports.closeDelimiter || _DEFAULT_CLOSE_DELIMITER; + options.delimiter = opts.delimiter || exports.delimiter || _DEFAULT_DELIMITER; + options.strict = opts.strict || false; + options.context = opts.context; + options.cache = opts.cache || false; + options.rmWhitespace = opts.rmWhitespace; + options.root = opts.root; + options.includer = opts.includer; + options.outputFunctionName = opts.outputFunctionName; + options.localsName = opts.localsName || exports.localsName || _DEFAULT_LOCALS_NAME; + options.views = opts.views; + options.async = opts.async; + options.destructuredLocals = opts.destructuredLocals; + options.legacyInclude = typeof opts.legacyInclude != 'undefined' ? !!opts.legacyInclude : true; + + if (options.strict) { + options._with = false; + } + else { + options._with = typeof opts._with != 'undefined' ? opts._with : true; + } + + this.opts = options; + + this.regex = this.createRegex(); +} + +Template.modes = { + EVAL: 'eval', + ESCAPED: 'escaped', + RAW: 'raw', + COMMENT: 'comment', + LITERAL: 'literal' +}; + +Template.prototype = { + createRegex: function () { + var str = _REGEX_STRING; + var delim = utils.escapeRegExpChars(this.opts.delimiter); + var open = utils.escapeRegExpChars(this.opts.openDelimiter); + var close = utils.escapeRegExpChars(this.opts.closeDelimiter); + str = str.replace(/%/g, delim) + .replace(/</g, open) + .replace(/>/g, close); + return new RegExp(str); + }, + + compile: function () { + /** @type {string} */ + var src; + /** @type {ClientFunction} */ + var fn; + var opts = this.opts; + var prepended = ''; + var appended = ''; + /** @type {EscapeCallback} */ + var escapeFn = opts.escapeFunction; + /** @type {FunctionConstructor} */ + var ctor; + /** @type {string} */ + var sanitizedFilename = opts.filename ? 
JSON.stringify(opts.filename) : 'undefined'; + + if (!this.source) { + this.generateSource(); + prepended += + ' var __output = "";\n' + + ' function __append(s) { if (s !== undefined && s !== null) __output += s }\n'; + if (opts.outputFunctionName) { + prepended += ' var ' + opts.outputFunctionName + ' = __append;' + '\n'; + } + if (opts.destructuredLocals && opts.destructuredLocals.length) { + var destructuring = ' var __locals = (' + opts.localsName + ' || {}),\n'; + for (var i = 0; i < opts.destructuredLocals.length; i++) { + var name = opts.destructuredLocals[i]; + if (i > 0) { + destructuring += ',\n '; + } + destructuring += name + ' = __locals.' + name; + } + prepended += destructuring + ';\n'; + } + if (opts._with !== false) { + prepended += ' with (' + opts.localsName + ' || {}) {' + '\n'; + appended += ' }' + '\n'; + } + appended += ' return __output;' + '\n'; + this.source = prepended + this.source + appended; + } + + if (opts.compileDebug) { + src = 'var __line = 1' + '\n' + + ' , __lines = ' + JSON.stringify(this.templateText) + '\n' + + ' , __filename = ' + sanitizedFilename + ';' + '\n' + + 'try {' + '\n' + + this.source + + '} catch (e) {' + '\n' + + ' rethrow(e, __lines, __filename, __line, escapeFn);' + '\n' + + '}' + '\n'; + } + else { + src = this.source; + } + + if (opts.client) { + src = 'escapeFn = escapeFn || ' + escapeFn.toString() + ';' + '\n' + src; + if (opts.compileDebug) { + src = 'rethrow = rethrow || ' + rethrow.toString() + ';' + '\n' + src; + } + } + + if (opts.strict) { + src = '"use strict";\n' + src; + } + if (opts.debug) { + console.log(src); + } + if (opts.compileDebug && opts.filename) { + src = src + '\n' + + '//# sourceURL=' + sanitizedFilename + '\n'; + } + + try { + if (opts.async) { + // Have to use generated function for this, since in envs without support, + // it breaks in parsing + try { + ctor = (new Function('return (async function(){}).constructor;'))(); + } + catch(e) { + if (e instanceof SyntaxError) { + throw new Error('This environment does not support async/await'); + } + else { + throw e; + } + } + } + else { + ctor = Function; + } + fn = new ctor(opts.localsName + ', escapeFn, include, rethrow', src); + } + catch(e) { + // istanbul ignore else + if (e instanceof SyntaxError) { + if (opts.filename) { + e.message += ' in ' + opts.filename; + } + e.message += ' while compiling ejs\n\n'; + e.message += 'If the above error is not helpful, you may want to try EJS-Lint:\n'; + e.message += 'https://github.com/RyanZim/EJS-Lint'; + if (!opts.async) { + e.message += '\n'; + e.message += 'Or, if you meant to create an async function, pass `async: true` as an option.'; + } + } + throw e; + } + + // Return a callable function which will execute the function + // created by the source-code, with the passed data as locals + // Adds a local `include` function which allows full recursive include + var returnedFn = opts.client ? 
fn : function anonymous(data) { + var include = function (path, includeData) { + var d = utils.shallowCopy({}, data); + if (includeData) { + d = utils.shallowCopy(d, includeData); + } + return includeFile(path, opts)(d); + }; + return fn.apply(opts.context, [data || {}, escapeFn, include, rethrow]); + }; + if (opts.filename && typeof Object.defineProperty === 'function') { + var filename = opts.filename; + var basename = path.basename(filename, path.extname(filename)); + try { + Object.defineProperty(returnedFn, 'name', { + value: basename, + writable: false, + enumerable: false, + configurable: true + }); + } catch (e) {/* ignore */} + } + return returnedFn; + }, + + generateSource: function () { + var opts = this.opts; + + if (opts.rmWhitespace) { + // Have to use two separate replace here as `^` and `$` operators don't + // work well with `\r` and empty lines don't work well with the `m` flag. + this.templateText = + this.templateText.replace(/[\r\n]+/g, '\n').replace(/^\s+|\s+$/gm, ''); + } + + // Slurp spaces and tabs before <%_ and after _%> + this.templateText = + this.templateText.replace(/[ \t]*<%_/gm, '<%_').replace(/_%>[ \t]*/gm, '_%>'); + + var self = this; + var matches = this.parseTemplateText(); + var d = this.opts.delimiter; + var o = this.opts.openDelimiter; + var c = this.opts.closeDelimiter; + + if (matches && matches.length) { + matches.forEach(function (line, index) { + var closing; + // If this is an opening tag, check for closing tags + // FIXME: May end up with some false positives here + // Better to store modes as k/v with openDelimiter + delimiter as key + // Then this can simply check against the map + if ( line.indexOf(o + d) === 0 // If it is a tag + && line.indexOf(o + d + d) !== 0) { // and is not escaped + closing = matches[index + 2]; + if (!(closing == d + c || closing == '-' + d + c || closing == '_' + d + c)) { + throw new Error('Could not find matching close tag for "' + line + '".'); + } + } + self.scanLine(line); + }); + } + + }, + + parseTemplateText: function () { + var str = this.templateText; + var pat = this.regex; + var result = pat.exec(str); + var arr = []; + var firstPos; + + while (result) { + firstPos = result.index; + + if (firstPos !== 0) { + arr.push(str.substring(0, firstPos)); + str = str.slice(firstPos); + } + + arr.push(result[0]); + str = str.slice(result[0].length); + result = pat.exec(str); + } + + if (str) { + arr.push(str); + } + + return arr; + }, + + _addOutput: function (line) { + if (this.truncate) { + // Only replace single leading linebreak in the line after + // -%> tag -- this is the single, trailing linebreak + // after the tag that the truncation mode replaces + // Handle Win / Unix / old Mac linebreaks -- do the \r\n + // combo first in the regex-or + line = line.replace(/^(?:\r\n|\r|\n)/, ''); + this.truncate = false; + } + if (!line) { + return line; + } + + // Preserve literal slashes + line = line.replace(/\\/g, '\\\\'); + + // Convert linebreaks + line = line.replace(/\n/g, '\\n'); + line = line.replace(/\r/g, '\\r'); + + // Escape double-quotes + // - this will be the delimiter during execution + line = line.replace(/"/g, '\\"'); + this.source += ' ; __append("' + line + '")' + '\n'; + }, + + scanLine: function (line) { + var self = this; + var d = this.opts.delimiter; + var o = this.opts.openDelimiter; + var c = this.opts.closeDelimiter; + var newLineCount = 0; + + newLineCount = (line.split('\n').length - 1); + + switch (line) { + case o + d: + case o + d + '_': + this.mode = Template.modes.EVAL; + break; + 
case o + d + '=': + this.mode = Template.modes.ESCAPED; + break; + case o + d + '-': + this.mode = Template.modes.RAW; + break; + case o + d + '#': + this.mode = Template.modes.COMMENT; + break; + case o + d + d: + this.mode = Template.modes.LITERAL; + this.source += ' ; __append("' + line.replace(o + d + d, o + d) + '")' + '\n'; + break; + case d + d + c: + this.mode = Template.modes.LITERAL; + this.source += ' ; __append("' + line.replace(d + d + c, d + c) + '")' + '\n'; + break; + case d + c: + case '-' + d + c: + case '_' + d + c: + if (this.mode == Template.modes.LITERAL) { + this._addOutput(line); + } + + this.mode = null; + this.truncate = line.indexOf('-') === 0 || line.indexOf('_') === 0; + break; + default: + // In script mode, depends on type of tag + if (this.mode) { + // If '//' is found without a line break, add a line break. + switch (this.mode) { + case Template.modes.EVAL: + case Template.modes.ESCAPED: + case Template.modes.RAW: + if (line.lastIndexOf('//') > line.lastIndexOf('\n')) { + line += '\n'; + } + } + switch (this.mode) { + // Just executing code + case Template.modes.EVAL: + this.source += ' ; ' + line + '\n'; + break; + // Exec, esc, and output + case Template.modes.ESCAPED: + this.source += ' ; __append(escapeFn(' + stripSemi(line) + '))' + '\n'; + break; + // Exec and output + case Template.modes.RAW: + this.source += ' ; __append(' + stripSemi(line) + ')' + '\n'; + break; + case Template.modes.COMMENT: + // Do nothing + break; + // Literal <%% mode, append as raw output + case Template.modes.LITERAL: + this._addOutput(line); + break; + } + } + // In string mode, just add the output + else { + this._addOutput(line); + } + } + + if (self.opts.compileDebug && newLineCount) { + this.currentLine += newLineCount; + this.source += ' ; __line = ' + this.currentLine + '\n'; + } + } +}; + +/** + * Escape characters reserved in XML. + * + * This is simply an export of {@link module:utils.escapeXML}. + * + * If `markup` is `undefined` or `null`, the empty string is returned. + * + * @param {String} markup Input string + * @return {String} Escaped string + * @public + * @func + * */ +exports.escapeXML = utils.escapeXML; + +/** + * Express.js support. + * + * This is an alias for {@link module:ejs.renderFile}, in order to support + * Express.js out-of-the-box. + * + * @func + */ + +exports.__express = exports.renderFile; + +/** + * Version of EJS. + * + * @readonly + * @type {String} + * @public + */ + +exports.VERSION = _VERSION_STRING; + +/** + * Name for detection of EJS. + * + * @readonly + * @type {String} + * @public + */ + +exports.name = _NAME; + +/* istanbul ignore if */ +if (typeof window != 'undefined') { + window.ejs = exports; +} diff --git a/node_modules/ejs/lib/utils.js b/node_modules/ejs/lib/utils.js new file mode 100644 index 0000000..284de06 --- /dev/null +++ b/node_modules/ejs/lib/utils.js @@ -0,0 +1,179 @@ +/* + * EJS Embedded JavaScript templates + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +/** + * Private utility functions + * @module utils + * @private + */ + +'use strict'; + +var regExpChars = /[|\\{}()[\]^$+*?.]/g; + +/** + * Escape characters reserved in regular expressions. + * + * If `string` is `undefined` or `null`, the empty string is returned. + * + * @param {String} string Input string + * @return {String} Escaped string + * @static + * @private + */ +exports.escapeRegExpChars = function (string) { + // istanbul ignore if + if (!string) { + return ''; + } + return String(string).replace(regExpChars, '\\$&'); +}; + +var _ENCODE_HTML_RULES = { + '&': '&amp;', + '<': '&lt;', + '>': '&gt;', + '"': '&#34;', + "'": '&#39;' +}; +var _MATCH_HTML = /[&<>'"]/g; + +function encode_char(c) { + return _ENCODE_HTML_RULES[c] || c; +} + +/** + * Stringified version of constants used by {@link module:utils.escapeXML}. + * + * It is used in the process of generating {@link ClientFunction}s. + * + * @readonly + * @type {String} + */ + +var escapeFuncStr = + 'var _ENCODE_HTML_RULES = {\n' ++ ' "&": "&amp;"\n' ++ ' , "<": "&lt;"\n' ++ ' , ">": "&gt;"\n' ++ ' , \'"\': "&#34;"\n' ++ ' , "\'": "&#39;"\n' ++ ' }\n' ++ ' , _MATCH_HTML = /[&<>\'"]/g;\n' ++ 'function encode_char(c) {\n' ++ ' return _ENCODE_HTML_RULES[c] || c;\n' ++ '};\n'; + +/** + * Escape characters reserved in XML. + * + * If `markup` is `undefined` or `null`, the empty string is returned. + * + * @implements {EscapeCallback} + * @param {String} markup Input string + * @return {String} Escaped string + * @static + * @private + */ + +exports.escapeXML = function (markup) { + return markup == undefined + ? '' + : String(markup) + .replace(_MATCH_HTML, encode_char); +}; +exports.escapeXML.toString = function () { + return Function.prototype.toString.call(this) + ';\n' + escapeFuncStr; +}; + +/** + * Naive copy of properties from one object to another. + * Does not recurse into non-scalar properties + * Does not check to see if the property has a value before copying + * + * @param {Object} to Destination object + * @param {Object} from Source object + * @return {Object} Destination object + * @static + * @private + */ +exports.shallowCopy = function (to, from) { + from = from || {}; + for (var p in from) { + to[p] = from[p]; + } + return to; +}; + +/** + * Naive copy of a list of key names, from one object to another. + * Only copies property if it is actually defined + * Does not recurse into non-scalar properties + * + * @param {Object} to Destination object + * @param {Object} from Source object + * @param {Array} list List of properties to copy + * @return {Object} Destination object + * @static + * @private + */ +exports.shallowCopyFromList = function (to, from, list) { + for (var i = 0; i < list.length; i++) { + var p = list[i]; + if (typeof from[p] != 'undefined') { + to[p] = from[p]; + } + } + return to; +}; + +/** + * Simple in-process cache implementation. Does not implement limits of any + * sort. + * + * @implements {Cache} + * @static + * @private + */ +exports.cache = { + _data: {}, + set: function (key, val) { + this._data[key] = val; + }, + get: function (key) { + return this._data[key]; + }, + remove: function (key) { + delete this._data[key]; + }, + reset: function () { + this._data = {}; + } +}; + +/** + * Transforms hyphen case variable into camel case.
+ * + * @param {String} string Hyphen case string + * @return {String} Camel case string + * @static + * @private + */ +exports.hyphenToCamel = function (str) { + return str.replace(/-[a-z]/g, function (match) { return match[1].toUpperCase(); }); +}; diff --git a/node_modules/ejs/package.json b/node_modules/ejs/package.json new file mode 100644 index 0000000..c2b036b --- /dev/null +++ b/node_modules/ejs/package.json @@ -0,0 +1,42 @@ +{ + "name": "ejs", + "description": "Embedded JavaScript templates", + "keywords": [ + "template", + "engine", + "ejs" + ], + "version": "3.1.6", + "author": "Matthew Eernisse <mde@fleegix.org> (http://fleegix.org)", + "license": "Apache-2.0", + "bin": { + "ejs": "./bin/cli.js" + }, + "main": "./lib/ejs.js", + "jsdelivr": "ejs.min.js", + "unpkg": "ejs.min.js", + "repository": { + "type": "git", + "url": "git://github.com/mde/ejs.git" + }, + "bugs": "https://github.com/mde/ejs/issues", + "homepage": "https://github.com/mde/ejs", + "dependencies": { + "jake": "^10.6.1" + }, + "devDependencies": { + "browserify": "^16.5.1", + "eslint": "^6.8.0", + "git-directory-deploy": "^1.5.1", + "jsdoc": "^3.6.4", + "lru-cache": "^4.0.1", + "mocha": "^7.1.1", + "uglify-js": "^3.3.16" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "mocha" + } +} diff --git a/node_modules/ejs/usage.txt b/node_modules/ejs/usage.txt new file mode 100644 index 0000000..7469f7f --- /dev/null +++ b/node_modules/ejs/usage.txt @@ -0,0 +1,24 @@ +EJS Embedded JavaScript templates +{Usage}: ejs [options ...] template-file [data variables ...] + +{Options}: + -o, --output-file FILE Write the rendered output to FILE rather than stdout. + -f, --data-file FILE Must be JSON-formatted. Use parsed input from FILE as data for rendering. + -i, --data-input STRING Must be JSON-formatted and URI-encoded. Use parsed input from STRING as data for rendering. + -m, --delimiter CHARACTER Use CHARACTER with angle brackets for open/close (defaults to %). + -p, --open-delimiter CHARACTER Use CHARACTER instead of left angle bracket to open. + -c, --close-delimiter CHARACTER Use CHARACTER instead of right angle bracket to close. + -s, --strict When set to `true`, generated function is in strict mode + -n --no-with Use 'locals' object for vars rather than using `with` (implies --strict). + -l --locals-name Name to use for the object storing local variables when not using `with`. + -w --rm-whitespace Remove all safe-to-remove whitespace, including leading and trailing whitespace. + -d --debug Outputs generated function body + -h, --help Display this help message. + -V/v, --version Display the EJS version. 
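These CLI switches map directly onto the library options documented in lib/ejs.js earlier in this patch; the {Examples} block below shows the command-line side. As a rough sketch only (file names are hypothetical), `ejs -n -l _ ./tmpl.ejs -f ./data.json` corresponds to a programmatic call along these lines:

```javascript
const ejs = require('ejs');
const data = require('./data.json'); // hypothetical data file, as with -f

// --no-with / --locals-name _ : locals are exposed as `_` instead of via `with`,
// and the generated function runs in strict mode (implied by --no-with).
ejs.renderFile('./tmpl.ejs', data, { strict: true, _with: false, localsName: '_' },
  function (err, html) {
    if (err) throw err;
    console.log(html);
  });
```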
+ +{Examples}: + ejs -m $ ./test/fixtures/user.ejs -f ./user_data.json + ejs -m $ ./test/fixtures/user.ejs name=Lerxst + ejs -p [ -c ] ./template_file.ejs -o ./output.html + ejs -n -l _ ./some_template.ejs -f ./data_file.json + ejs -w ./template_with_whitspace.ejs -o ./output_file.html diff --git a/node_modules/escape-string-regexp/index.js b/node_modules/escape-string-regexp/index.js new file mode 100644 index 0000000..7834bf9 --- /dev/null +++ b/node_modules/escape-string-regexp/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; + +module.exports = function (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a string'); + } + + return str.replace(matchOperatorsRe, '\\$&'); +}; diff --git a/node_modules/escape-string-regexp/license b/node_modules/escape-string-regexp/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/escape-string-regexp/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
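The escape-string-regexp module added above does for arbitrary strings what EJS's own `escapeRegExpChars` helper does internally: it neutralises regex metacharacters before a string reaches `new RegExp`. A minimal sketch (the input string is invented for illustration):

```javascript
const escapeStringRegexp = require('escape-string-regexp');

const userInput = 'how much $ for a unicorn?';       // '$' and '?' are regex operators
const re = new RegExp(escapeStringRegexp(userInput));

console.log(re.test('how much $ for a unicorn?'));   // true
console.log(re.test('how much gold for a unicorn')); // false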
diff --git a/node_modules/escape-string-regexp/package.json b/node_modules/escape-string-regexp/package.json new file mode 100644 index 0000000..f307df3 --- /dev/null +++ b/node_modules/escape-string-regexp/package.json @@ -0,0 +1,41 @@ +{ + "name": "escape-string-regexp", + "version": "1.0.5", + "description": "Escape RegExp special characters", + "license": "MIT", + "repository": "sindresorhus/escape-string-regexp", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)", + "Joshua Boy Nicolai Appelman <joshua@jbna.nl> (jbna.nl)" + ], + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "escape", + "regex", + "regexp", + "re", + "regular", + "expression", + "string", + "str", + "special", + "characters" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/escape-string-regexp/readme.md b/node_modules/escape-string-regexp/readme.md new file mode 100644 index 0000000..87ac82d --- /dev/null +++ b/node_modules/escape-string-regexp/readme.md @@ -0,0 +1,27 @@ +# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp) + +> Escape RegExp special characters + + +## Install + +``` +$ npm install --save escape-string-regexp +``` + + +## Usage + +```js +const escapeStringRegexp = require('escape-string-regexp'); + +const escapedString = escapeStringRegexp('how much $ for a unicorn?'); +//=> 'how much \$ for a unicorn\?' + +new RegExp(escapedString); +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/filelist/README.md b/node_modules/filelist/README.md new file mode 100644 index 0000000..b52ebe7 --- /dev/null +++ b/node_modules/filelist/README.md @@ -0,0 +1,84 @@ +## FileList + +A FileList is a lazy-evaluated list of files. When given a list +of glob patterns for possible files to be included in the file +list, instead of searching the file structures to find the files, +a FileList holds the pattern for latter use. + +This allows you to define a FileList to match any number of +files, but only search out the actual files when then FileList +itself is actually used. The key is that the first time an +element of the FileList/Array is requested, the pending patterns +are resolved into a real list of file names. + +### Usage + +Add files to the list with the `include` method. You can add glob +patterns, individual files, or RegExp objects. When the Array +methods are invoked on the FileList, these items are resolved to +an actual list of files. + +```javascript +var fl = new FileList(); +fl.include('test/*.js'); +fl.exclude('test/helpers.js'); +``` + +Use the `exclude` method to override inclusions. You can use this +when your inclusions are too broad. + +### Array methods + +FileList has lazy-evaluated versions of most of the array +methods, including the following: + +* join +* pop +* push +* concat +* reverse +* shift +* unshift +* slice +* splice +* sort +* filter +* forEach +* some +* every +* map +* indexOf +* lastIndexOf +* reduce +* reduceRight + +When you call one of these methods, the items in the FileList +will be resolved to the full list of files, and the method will +be invoked on that result. 
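In other words, nothing touches the filesystem until one of these array methods (or `resolve`/`toArray`) is first called. A small sketch of that lazy behaviour, assuming the package is loaded as `require('filelist')` as its type definitions suggest:

```javascript
var FileList = require('filelist').FileList;

var fl = new FileList();
fl.include('lib/**/*.js');   // just records the glob pattern
fl.exclude(/\.min\.js$/);    // and the exclusion rule

// The glob is only resolved here, on the first array-method call:
fl.forEach(function (file) {
  console.log(file);
});
console.log(fl.length(), 'files matched');
```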
+ +### Special `length` method + +`length`: FileList includes a length *method* (instead of a +property) which returns the number of actual files in the list +once it's been resolved. + +### FileList-specific methods + +`include`: Add a filename/glob/regex to the list + +`exclude`: Override inclusions by excluding a filename/glob/regex + +`resolve`: Resolve the items in the FileList to the full list of +files. This method is invoked automatically when one of the array +methods is called. + +`toArray`: Immediately resolves the list of items, and returns an +actual array of filepaths. + +`clearInclusions`: Clears any pending items -- must be used +before resolving the list. + +`clearExclusions`: Clears the list of exclusions rules. + + + diff --git a/node_modules/filelist/index.d.ts b/node_modules/filelist/index.d.ts new file mode 100644 index 0000000..dbf6dc5 --- /dev/null +++ b/node_modules/filelist/index.d.ts @@ -0,0 +1,54 @@ +// Type definitions for filelist v0.0.6 +// Project: https://github.com/mde/filelist +// Definitions by: Christophe MASSOLIN <https://github.com/FuriouZz> +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +declare module "filelist" { + export class FileList { + pendingAdd: string[] + pending: boolean + excludes: { + pats: RegExp[], + funcs: Function[], + regex: null | RegExp + } + items: string[] + static clone(): FileList + static verbose: boolean + toArray(): string[] + include(options: any, ...items: string[]): void + exclude(...items: string[]): void + resolve(): void + clearInclusions(): void + clearExclusions(): void + length(): number + toString(): string; + toLocaleString(): string; + push(...items: string[]): number; + pop(): string | undefined; + concat(...items: ReadonlyArray<string>[]): string[]; + concat(...items: (string | ReadonlyArray<string>)[]): string[]; + join(separator?: string): string; + reverse(): string[]; + shift(): string | undefined; + slice(start?: number, end?: number): string[]; + sort(compareFn?: (a: string, b: string) => number): this; + splice(start: number, deleteCount?: number): string[]; + splice(start: number, deleteCount: number, ...items: string[]): string[]; + unshift(...items: string[]): number; + indexOf(searchElement: string, fromIndex?: number): number; + lastIndexOf(searchElement: string, fromIndex?: number): number; + every(callbackfn: (value: string, index: number, array: string[]) => boolean, thisArg?: any): boolean; + some(callbackfn: (value: string, index: number, array: string[]) => boolean, thisArg?: any): boolean; + forEach(callbackfn: (value: string, index: number, array: string[]) => void, thisArg?: any): void; + map<U>(callbackfn: (value: string, index: number, array: string[]) => U, thisArg?: any): U[]; + filter<S extends string>(callbackfn: (value: string, index: number, array: string[]) => value is S, thisArg?: any): S[]; + filter(callbackfn: (value: string, index: number, array: string[]) => any, thisArg?: any): string[]; + reduce(callbackfn: (previousValue: string, currentValue: string, currentIndex: number, array: string[]) => string): string; + reduce(callbackfn: (previousValue: string, currentValue: string, currentIndex: number, array: string[]) => string, initialValue: string): string; + reduce<U>(callbackfn: (previousValue: U, currentValue: string, currentIndex: number, array: string[]) => U, initialValue: U): U; + reduceRight(callbackfn: (previousValue: string, currentValue: string, currentIndex: number, array: string[]) => string): string; + reduceRight(callbackfn: 
(previousValue: string, currentValue: string, currentIndex: number, array: string[]) => string, initialValue: string): string; + reduceRight<U>(callbackfn: (previousValue: U, currentValue: string, currentIndex: number, array: string[]) => U, initialValue: U): U; + } +}
\ No newline at end of file diff --git a/node_modules/filelist/index.js b/node_modules/filelist/index.js new file mode 100644 index 0000000..fef873a --- /dev/null +++ b/node_modules/filelist/index.js @@ -0,0 +1,486 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ +var fs = require('fs') +, path = require('path') +, minimatch = require('minimatch') +, escapeRegExpChars +, merge +, basedir +, _readDir +, readdirR +, globSync; + + /** + @name escapeRegExpChars + @function + @return {String} A string of escaped characters + @description Escapes regex control-characters in strings + used to build regexes dynamically + @param {String} string The string of chars to escape + */ + escapeRegExpChars = (function () { + var specials = [ '^', '$', '/', '.', '*', '+', '?', '|', '(', ')', + '[', ']', '{', '}', '\\' ]; + var sRE = new RegExp('(\\' + specials.join('|\\') + ')', 'g'); + return function (string) { + var str = string || ''; + str = String(str); + return str.replace(sRE, '\\$1'); + }; + })(); + + /** + @name merge + @function + @return {Object} Returns the merged object + @description Merge merges `otherObject` into `object` and takes care of deep + merging of objects + @param {Object} object Object to merge into + @param {Object} otherObject Object to read from + */ + merge = function (object, otherObject) { + var obj = object || {} + , otherObj = otherObject || {} + , key, value; + + for (key in otherObj) { + value = otherObj[key]; + + // Check if a value is an Object, if so recursively add it's key/values + if (typeof value === 'object' && !(value instanceof Array)) { + // Update value of object to the one from otherObj + obj[key] = merge(obj[key], value); + } + // Value is anything other than an Object, so just add it + else { + obj[key] = value; + } + } + + return obj; + }; + /** + Given a patern, return the base directory of it (ie. the folder + that will contain all the files matching the path). + eg. file.basedir('/test/**') => '/test/' + Path ending by '/' are considerd as folder while other are considerd + as files, eg.: + file.basedir('/test/a/') => '/test/a' + file.basedir('/test/a') => '/test' + The returned path always end with a '/' so we have: + file.basedir(file.basedir(x)) == file.basedir(x) + */ + basedir = function (pathParam) { + var bd = '' + , parts + , part + , pos = 0 + , p = pathParam || ''; + + // If the path has a leading asterisk, basedir is the current dir + if (p.indexOf('*') == 0 || p.indexOf('**') == 0) { + return '.'; + } + + // always consider .. 
at the end as a folder and not a filename + if (/(?:^|\/|\\)\.\.$/.test(p.slice(-3))) { + p += '/'; + } + + parts = p.split(/\\|\//); + for (var i = 0, l = parts.length - 1; i < l; i++) { + part = parts[i]; + if (part.indexOf('*') > -1 || part.indexOf('**') > -1) { + break; + } + pos += part.length + 1; + bd += part + p[pos - 1]; + } + if (!bd) { + bd = '.'; + } + // Strip trailing slashes + if (!(bd == '\\' || bd == '/')) { + bd = bd.replace(/\\$|\/$/, ''); + } + return bd; + + }; + + // Return the contents of a given directory + _readDir = function (dirPath) { + var dir = path.normalize(dirPath) + , paths = [] + , ret = [dir] + , msg; + + try { + paths = fs.readdirSync(dir); + } + catch (e) { + msg = 'Could not read path ' + dir + '\n'; + if (e.stack) { + msg += e.stack; + } + throw new Error(msg); + } + + paths.forEach(function (p) { + var curr = path.join(dir, p); + var stat = fs.statSync(curr); + if (stat.isDirectory()) { + ret = ret.concat(_readDir(curr)); + } + else { + ret.push(curr); + } + }); + + return ret; + }; + + /** + @name file#readdirR + @function + @return {Array} Returns the contents as an Array, can be configured via opts.format + @description Reads the given directory returning it's contents + @param {String} dir The directory to read + @param {Object} opts Options to use + @param {String} [opts.format] Set the format to return(Default: Array) + */ + readdirR = function (dir, opts) { + var options = opts || {} + , format = options.format || 'array' + , ret; + ret = _readDir(dir); + return format == 'string' ? ret.join('\n') : ret; + }; + + +globSync = function (pat, opts) { + var dirname = basedir(pat) + , files + , matches; + + try { + files = readdirR(dirname).map(function(file){ + return file.replace(/\\/g, '/'); + }); + } + // Bail if path doesn't exist -- assume no files + catch(e) { + if (FileList.verbose) console.error(e.message); + } + + if (files) { + pat = path.normalize(pat); + matches = minimatch.match(files, pat, opts || {}); + } + return matches || []; +}; + +// Constants +// --------------- +// List of all the builtin Array methods we want to override +var ARRAY_METHODS = Object.getOwnPropertyNames(Array.prototype) +// Array methods that return a copy instead of affecting the original + , SPECIAL_RETURN = { + 'concat': true + , 'slice': true + , 'filter': true + , 'map': true + } +// Default file-patterns we want to ignore + , DEFAULT_IGNORE_PATTERNS = [ + /(^|[\/\\])CVS([\/\\]|$)/ + , /(^|[\/\\])\.svn([\/\\]|$)/ + , /(^|[\/\\])\.git([\/\\]|$)/ + , /\.bak$/ + , /~$/ + ] +// Ignore core files + , DEFAULT_IGNORE_FUNCS = [ + function (name) { + var isDir = false + , stats; + try { + stats = fs.statSync(name); + isDir = stats.isDirectory(); + } + catch(e) {} + return (/(^|[\/\\])core$/).test(name) && !isDir; + } + ]; + +var FileList = function () { + var self = this + , wrap; + + // List of glob-patterns or specific filenames + this.pendingAdd = []; + // Switched to false after lazy-eval of files + this.pending = true; + // Used to calculate exclusions from the list of files + this.excludes = { + pats: DEFAULT_IGNORE_PATTERNS.slice() + , funcs: DEFAULT_IGNORE_FUNCS.slice() + , regex: null + }; + this.items = []; + + // Wrap the array methods with the delegates + wrap = function (prop) { + var arr; + self[prop] = function () { + if (self.pending) { + self.resolve(); + } + if (typeof self.items[prop] == 'function') { + // Special method that return a copy + if (SPECIAL_RETURN[prop]) { + arr = self.items[prop].apply(self.items, arguments); + return 
FileList.clone(self, arr); + } + else { + return self.items[prop].apply(self.items, arguments); + } + } + else { + return self.items[prop]; + } + }; + }; + for (var i = 0, ii = ARRAY_METHODS.length; i < ii; i++) { + wrap(ARRAY_METHODS[i]); + } + + // Include whatever files got passed to the constructor + this.include.apply(this, arguments); + + // Fix constructor linkage + this.constructor = FileList; +}; + +FileList.prototype = new (function () { + var globPattern = /[*?\[\{]/; + + var _addMatching = function (item) { + var matches = globSync(item.path, item.options); + this.items = this.items.concat(matches); + } + + , _resolveAdd = function (item) { + if (globPattern.test(item.path)) { + _addMatching.call(this, item); + } + else { + this.push(item.path); + } + } + + , _calculateExcludeRe = function () { + var pats = this.excludes.pats + , pat + , excl = [] + , matches = []; + + for (var i = 0, ii = pats.length; i < ii; i++) { + pat = pats[i]; + if (typeof pat == 'string') { + // Glob, look up files + if (/[*?]/.test(pat)) { + matches = globSync(pat); + matches = matches.map(function (m) { + return escapeRegExpChars(m); + }); + excl = excl.concat(matches); + } + // String for regex + else { + excl.push(escapeRegExpChars(pat)); + } + } + // Regex, grab the string-representation + else if (pat instanceof RegExp) { + excl.push(pat.toString().replace(/^\/|\/$/g, '')); + } + } + if (excl.length) { + this.excludes.regex = new RegExp('(' + excl.join(')|(') + ')'); + } + else { + this.excludes.regex = /^$/; + } + } + + , _resolveExclude = function () { + var self = this; + _calculateExcludeRe.call(this); + // No `reject` method, so use reverse-filter + this.items = this.items.filter(function (name) { + return !self.shouldExclude(name); + }); + }; + + /** + * Includes file-patterns in the FileList. Should be called with one or more + * pattern for finding file to include in the list. Arguments should be strings + * for either a glob-pattern or a specific file-name, or an array of them + */ + this.include = function () { + var args = Array.prototype.slice.call(arguments) + , arg + , includes = { items: [], options: {} }; + + for (var i = 0, ilen = args.length; i < ilen; i++) { + arg = args[i]; + + if (typeof arg === 'object' && !Array.isArray(arg)) { + merge(includes.options, arg); + } else { + includes.items = includes.items.concat(arg).filter(function (item) { + return !!item; + }); + } + } + + var items = includes.items.map(function(item) { + return { path: item, options: includes.options }; + }); + + this.pendingAdd = this.pendingAdd.concat(items); + + return this; + }; + + /** + * Indicates whether a particular file would be filtered out by the current + * exclusion rules for this FileList. + * @param {String} name The filename to check + * @return {Boolean} Whether or not the file should be excluded + */ + this.shouldExclude = function (name) { + if (!this.excludes.regex) { + _calculateExcludeRe.call(this); + } + var excl = this.excludes; + return excl.regex.test(name) || excl.funcs.some(function (f) { + return !!f(name); + }); + }; + + /** + * Excludes file-patterns from the FileList. Should be called with one or more + * pattern for finding file to include in the list. Arguments can be: + * 1. Strings for either a glob-pattern or a specific file-name + * 2. Regular expression literals + * 3. Functions to be run on the filename that return a true/false + */ + this.exclude = function () { + var args = Array.isArray(arguments[0]) ? 
arguments[0] : arguments + , arg; + for (var i = 0, ii = args.length; i < ii; i++) { + arg = args[i]; + if (typeof arg == 'function' && !(arg instanceof RegExp)) { + this.excludes.funcs.push(arg); + } + else { + this.excludes.pats.push(arg); + } + } + if (!this.pending) { + _resolveExclude.call(this); + } + return this; + }; + + /** + * Populates the FileList from the include/exclude rules with a list of + * actual files + */ + this.resolve = function () { + var item + , uniqueFunc = function (p, c) { + if (p.indexOf(c) < 0) { + p.push(c); + } + return p; + }; + if (this.pending) { + this.pending = false; + while ((item = this.pendingAdd.shift())) { + _resolveAdd.call(this, item); + } + // Reduce to a unique list + this.items = this.items.reduce(uniqueFunc, []); + // Remove exclusions + _resolveExclude.call(this); + } + return this; + }; + + /** + * Convert to a plain-jane array + */ + this.toArray = function () { + // Call slice to ensure lazy-resolution before slicing items + var ret = this.slice().items.slice(); + return ret; + }; + + /** + * Clear any pending items -- only useful before + * calling `resolve` + */ + this.clearInclusions = function () { + this.pendingAdd = []; + return this; + }; + + /** + * Clear any current exclusion rules + */ + this.clearExclusions = function () { + this.excludes = { + pats: [] + , funcs: [] + , regex: null + }; + return this; + }; + +})(); + +// Static method, used to create copy returned by special +// array methods +FileList.clone = function (list, items) { + var clone = new FileList(); + if (items) { + clone.items = items; + } + clone.pendingAdd = list.pendingAdd; + clone.pending = list.pending; + for (var p in list.excludes) { + clone.excludes[p] = list.excludes[p]; + } + return clone; +}; + +FileList.verbose = true + +exports.FileList = FileList; diff --git a/node_modules/filelist/jakefile.js b/node_modules/filelist/jakefile.js new file mode 100644 index 0000000..18d4d7d --- /dev/null +++ b/node_modules/filelist/jakefile.js @@ -0,0 +1,15 @@ +testTask('FileList', function () { + this.testFiles.include('test/*.js'); +}); + +publishTask('FileList', function () { + this.packageFiles.include([ + 'jakefile.js', + 'README.md', + 'package.json', + 'index.js', + 'index.d.ts' + ]); +}); + + diff --git a/node_modules/filelist/package.json b/node_modules/filelist/package.json new file mode 100644 index 0000000..9921036 --- /dev/null +++ b/node_modules/filelist/package.json @@ -0,0 +1,28 @@ +{ + "name": "filelist", + "version": "1.0.2", + "description": "Lazy-evaluating list of files, based on globs or regex patterns", + "main": "index.js", + "types": "index.d.ts", + "scripts": { + "test": "jake test" + }, + "repository": { + "type": "git", + "url": "git://github.com/mde/filelist.git" + }, + "keywords": [ + "file", + "utility", + "glob" + ], + "author": "Matthew Eernisse <mde@fleegix.org> (http://fleegix.org)", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/mde/filelist/issues" + }, + "homepage": "https://github.com/mde/filelist", + "dependencies": { + "minimatch": "^3.0.4" + } +} diff --git a/node_modules/has-flag/index.js b/node_modules/has-flag/index.js new file mode 100644 index 0000000..5139728 --- /dev/null +++ b/node_modules/has-flag/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = (flag, argv) => { + argv = argv || process.argv; + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? 
'-' : '--'); + const pos = argv.indexOf(prefix + flag); + const terminatorPos = argv.indexOf('--'); + return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos); +}; diff --git a/node_modules/has-flag/license b/node_modules/has-flag/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/has-flag/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/has-flag/package.json b/node_modules/has-flag/package.json new file mode 100644 index 0000000..e1eb17a --- /dev/null +++ b/node_modules/has-flag/package.json @@ -0,0 +1,44 @@ +{ + "name": "has-flag", + "version": "3.0.0", + "description": "Check if argv has a specific flag", + "license": "MIT", + "repository": "sindresorhus/has-flag", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "has", + "check", + "detect", + "contains", + "find", + "flag", + "cli", + "command-line", + "argv", + "process", + "arg", + "args", + "argument", + "arguments", + "getopt", + "minimist", + "optimist" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/has-flag/readme.md b/node_modules/has-flag/readme.md new file mode 100644 index 0000000..677893c --- /dev/null +++ b/node_modules/has-flag/readme.md @@ -0,0 +1,70 @@ +# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag) + +> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag + +Correctly stops looking after an `--` argument terminator. + + +## Install + +``` +$ npm install has-flag +``` + + +## Usage + +```js +// foo.js +const hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('f'); +//=> true + +hasFlag('-f'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` + +``` +$ node foo.js -f --unicorn --foo=bar -- --rainbow +``` + + +## API + +### hasFlag(flag, [argv]) + +Returns a boolean for whether the flag exists. + +#### flag + +Type: `string` + +CLI flag to look for. The `--` prefix is optional. + +#### argv + +Type: `string[]`<br> +Default: `process.argv` + +CLI arguments. 
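A short sketch of the optional `argv` parameter together with the `--` terminator handling implemented above (the argument vector is invented for illustration):

```js
const hasFlag = require('has-flag');

// Pass a custom argv instead of relying on process.argv.
const argv = ['--unicorn', '--', '--rainbow'];

hasFlag('unicorn', argv); //=> true  (the '--' prefix is added automatically)
hasFlag('rainbow', argv); //=> false (it appears after the '--' terminator)
```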
+ + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/jake/Makefile b/node_modules/jake/Makefile new file mode 100644 index 0000000..3d0574e --- /dev/null +++ b/node_modules/jake/Makefile @@ -0,0 +1,44 @@ +# +# Jake JavaScript build tool +# Copyright 2112 Matthew Eernisse (mde@fleegix.org) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +.PHONY: all build install clean uninstall + +PREFIX=/usr/local +DESTDIR= + +all: build + +build: + @echo 'Jake built.' + +install: + @mkdir -p $(DESTDIR)$(PREFIX)/bin && \ + mkdir -p $(DESTDIR)$(PREFIX)/lib/node_modules/jake && \ + mkdir -p ./node_modules && \ + npm install utilities minimatch && \ + cp -R ./* $(DESTDIR)$(PREFIX)/lib/node_modules/jake/ && \ + ln -snf ../lib/node_modules/jake/bin/cli.js $(DESTDIR)$(PREFIX)/bin/jake && \ + chmod 755 $(DESTDIR)$(PREFIX)/lib/node_modules/jake/bin/cli.js && \ + echo 'Jake installed.' + +clean: + @true + +uninstall: + @rm -f $(DESTDIR)$(PREFIX)/bin/jake && \ + rm -fr $(DESTDIR)$(PREFIX)/lib/node_modules/jake/ && \ + echo 'Jake uninstalled.' diff --git a/node_modules/jake/README.md b/node_modules/jake/README.md new file mode 100644 index 0000000..e938850 --- /dev/null +++ b/node_modules/jake/README.md @@ -0,0 +1,17 @@ +### Jake -- the JavaScript build tool for Node.js + +[![Build Status](https://travis-ci.org/jakejs/jake.svg?branch=master)](https://travis-ci.org/jakejs/jake) + +Documentation site at [http://jakejs.com](http://jakejs.com/) + +### Contributing +1. [Install node](http://nodejs.org/#download). +2. Clone this repository `$ git clone git@github.com:jakejs/jake.git`. +3. Install dependencies `$ npm install`. +4. Run tests with `$ npm test`. +5. Start Hacking! 
+ +### License + +Licensed under the Apache License, Version 2.0 +(<http://www.apache.org/licenses/LICENSE-2.0>) diff --git a/node_modules/jake/bin/bash_completion.sh b/node_modules/jake/bin/bash_completion.sh new file mode 100644 index 0000000..bb25995 --- /dev/null +++ b/node_modules/jake/bin/bash_completion.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +# http://stackoverflow.com/a/246128 +SOURCE="${BASH_SOURCE[0]}" +while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located +done +JAKE_BIN_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + +# http://stackoverflow.com/a/12495480 +# http://stackoverflow.com/a/28647824 +_auto_jake() +{ + local cur + local -a COMPGEN=() + _get_comp_words_by_ref -n : -c cur + + # run auto-completions in jake via our auto_complete.js wrapper + local -a auto_complete_info=( $(export COMP_LINE="${COMP_LINE}" && ${JAKE_BIN_DIR}/auto_complete.js "$cur" "${3}") ) + # check reply flag + local reply_flag="${auto_complete_info[0]}" + if [[ "${reply_flag}" == "no-complete" ]]; then + return 1 + fi + local auto_completions=("${auto_complete_info[@]:1}") + COMPGEN=( $(compgen -W "${auto_completions[*]}" -- "$cur") ) + COMPREPLY=( "${COMPGEN[@]}" ) + + __ltrim_colon_completions "$cur" + + # do we need another space?? + if [[ "${reply_flag}" == "yes-space" ]]; then + COMPREPLY=( "${COMPGEN[@]}" " " ) + fi + + return 0 +} + +complete -o default -F _auto_jake jake diff --git a/node_modules/jake/bin/cli.js b/node_modules/jake/bin/cli.js new file mode 100644 index 0000000..9f68abb --- /dev/null +++ b/node_modules/jake/bin/cli.js @@ -0,0 +1,31 @@ +#!/usr/bin/env node +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +// Try to load a local jake +try { + require(`${ process.cwd() }/node_modules/jake`); +} +// If that fails, likely running globally +catch(e) { + require('../lib/jake'); +} + +var args = process.argv.slice(2); + +jake.run.apply(jake, args); diff --git a/node_modules/jake/jakefile.js b/node_modules/jake/jakefile.js new file mode 100644 index 0000000..b0ae79b --- /dev/null +++ b/node_modules/jake/jakefile.js @@ -0,0 +1,105 @@ +let fs = require('fs') +let path = require('path'); +let proc = require('child_process'); + +const PROJECT_DIR = process.cwd(); +process.env.PROJECT_DIR = PROJECT_DIR; + +namespace('doc', function () { + task('generate', ['doc:clobber'], function () { + var cmd = '../node-jsdoc-toolkit/app/run.js -n -r=100 ' + + '-t=../node-jsdoc-toolkit/templates/codeview -d=./doc/ ./lib'; + jake.logger.log('Generating docs ...'); + jake.exec([cmd], function () { + jake.logger.log('Done.'); + complete(); + }); + }, {async: true}); + + task('clobber', function () { + var cmd = 'rm -fr ./doc/*'; + jake.exec([cmd], function () { + jake.logger.log('Clobbered old docs.'); + complete(); + }); + }, {async: true}); + +}); + +desc('Generate docs for Jake'); +task('doc', ['doc:generate']); + +npmPublishTask('jake', function () { + this.packageFiles.include([ + 'Makefile', + 'jakefile.js', + 'README.md', + 'package.json', + 'usage.txt', + 'lib/**', + 'bin/**', + 'test/**' + ]); + this.packageFiles.exclude([ + 'test/tmp' + ]); +}); + +jake.Task['publish:package'].directory = PROJECT_DIR; + +namespace('test', function () { + + let integrationTest = task('integration', ['publish:package'], async function () { + let pkg = JSON.parse(fs.readFileSync(`${PROJECT_DIR}/package.json`).toString()); + let version = pkg.version; + + proc.execSync('rm -rf ./node_modules'); + // Install from the actual package, run tests from the packaged binary + proc.execSync(`mkdir -p node_modules/.bin && mv ${PROJECT_DIR}/pkg/jake-v` + + `${version} node_modules/jake && ln -s ${process.cwd()}` + + '/node_modules/jake/bin/cli.js ./node_modules/.bin/jake'); + + let testArgs = []; + if (process.env.filter) { + testArgs.push(process.env.filter); + } + else { + testArgs.push('*.js'); + } + let spawned = proc.spawn(`${PROJECT_DIR}/node_modules/.bin/mocha`, testArgs, { + stdio: 'inherit' + }); + return new Promise((resolve, reject) => { + spawned.on('exit', () => { + if (!(process.env.noclobber || process.env.noClobber)) { + proc.execSync('rm -rf tmp_publish && rm -rf package.json' + + ' && rm -rf package-lock.json && rm -rf node_modules'); + // Rather than invoking 'clobber' task + jake.rmRf(`${PROJECT_DIR}/pkg`); + } + resolve(); + }); + }); + + }); + + integrationTest.directory = `${PROJECT_DIR}/test/integration`; + + let unitTest = task('unit', async function () { + let testArgs = []; + if (process.env.filter) { + testArgs.push(process.env.filter); + } + else { + testArgs.push('*.js'); + } + let spawned = proc.spawn(`${PROJECT_DIR}/node_modules/.bin/mocha`, testArgs, { + stdio: 'inherit' + }); + }); + + unitTest.directory = `${PROJECT_DIR}/test/unit`; +}); + +desc('Runs all tests'); +task('test', ['test:unit', 'test:integration']); diff --git a/node_modules/jake/lib/api.js b/node_modules/jake/lib/api.js new file mode 100644 index 0000000..9f09140 --- /dev/null +++ b/node_modules/jake/lib/api.js @@ -0,0 +1,409 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ +let { uuid } = require('./utils'); + +let api = new (function () { + /** + @name task + @static + @function + @description Creates a Jake Task + ` + @param {String} name The name of the Task + @param {Array} [prereqs] Prerequisites to be run before this task + @param {Function} [action] The action to perform for this task + @param {Object} [opts] + @param {Boolean} [opts.asyc=false] Perform this task asynchronously. + If you flag a task with this option, you must call the global + `complete` method inside the task's action, for execution to proceed + to the next task. + + @example + desc('This is the default task.'); + task('default', function (params) { + console.log('This is the default task.'); + }); + + desc('This task has prerequisites.'); + task('hasPrereqs', ['foo', 'bar', 'baz'], function (params) { + console.log('Ran some prereqs first.'); + }); + + desc('This is an asynchronous task.'); + task('asyncTask', function () { + setTimeout(complete, 1000); + }, {async: true}); + */ + this.task = function (name, prereqs, action, opts) { + let args = Array.prototype.slice.call(arguments); + let createdTask; + args.unshift('task'); + createdTask = jake.createTask.apply(global, args); + jake.currentTaskDescription = null; + return createdTask; + }; + + /** + @name rule + @static + @function + @description Creates a Jake Suffix Rule + ` + @param {String} pattern The suffix name of the objective + @param {String} source The suffix name of the objective + @param {Array} [prereqs] Prerequisites to be run before this task + @param {Function} [action] The action to perform for this task + @param {Object} [opts] + @param {Boolean} [opts.asyc=false] Perform this task asynchronously. + If you flag a task with this option, you must call the global + `complete` method inside the task's action, for execution to proceed + to the next task. 
+ @example + desc('This is a rule, which does not support namespace or pattern.'); + rule('.o', '.c', {async: true}, function () { + let cmd = util.format('gcc -o %s %s', this.name, this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + desc('This rule has prerequisites.'); + rule('.o', '.c', ['util.h'], {async: true}, function () { + let cmd = util.format('gcc -o %s %s', this.name, this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + desc('This is a rule with patterns.'); + rule('%.o', '%.c', {async: true}, function () { + let cmd = util.format('gcc -o %s %s', this.name, this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + desc('This is another rule with patterns.'); + rule('obj/%.o', 'src/%.c', {async: true}, function () { + let cmd = util.format('gcc -o %s %s', this.name, this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + desc('This is an example with chain rules.'); + rule('%.pdf', '%.dvi', {async: true}, function () { + let cmd = util.format('dvipdfm %s',this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + rule('%.dvi', '%.tex', {async: true}, function () { + let cmd = util.format('latex %s',this.source); + jake.exec([cmd], function () { + complete(); + }, {printStdout: true}); + }); + + desc('This rule has a namespace.'); + task('default', ['debug:obj/main.o]); + + namespace('debug', {async: true}, function() { + rule('obj/%.o', 'src/%.c', function () { + // ... + }); + } + */ + this.rule = function () { + let args = Array.prototype.slice.call(arguments); + let arg; + let pattern = args.shift(); + let source = args.shift(); + let prereqs = []; + let action = function () {}; + let opts = {}; + let key = pattern.toString(); // May be a RegExp + + while ((arg = args.shift())) { + if (typeof arg == 'function') { + action = arg; + } + else if (Array.isArray(arg)) { + prereqs = arg; + } + else { + opts = arg; + } + } + + jake.currentNamespace.rules[key] = new jake.Rule({ + pattern: pattern, + source: source, + prereqs: prereqs, + action: action, + opts: opts, + desc: jake.currentTaskDescription, + ns: jake.currentNamespace + }); + jake.currentTaskDescription = null; + }; + + /** + @name directory + @static + @function + @description Creates a Jake DirectoryTask. Can be used as a prerequisite + for FileTasks, or for simply ensuring a directory exists for use with a + Task's action. + ` + @param {String} name The name of the DiretoryTask + + @example + + // Creates the package directory for distribution + directory('pkg'); + */ + this.directory = function (name) { + let args = Array.prototype.slice.call(arguments); + let createdTask; + args.unshift('directory'); + createdTask = jake.createTask.apply(global, args); + jake.currentTaskDescription = null; + return createdTask; + }; + + /** + @name file + @static + @function + @description Creates a Jake FileTask. + ` + @param {String} name The name of the FileTask + @param {Array} [prereqs] Prerequisites to be run before this task + @param {Function} [action] The action to create this file, if it doesn't + exist already. + @param {Object} [opts] + @param {Array} [opts.asyc=false] Perform this task asynchronously. + If you flag a task with this option, you must call the global + `complete` method inside the task's action, for execution to proceed + to the next task. 
+ + */ + this.file = function (name, prereqs, action, opts) { + let args = Array.prototype.slice.call(arguments); + let createdTask; + args.unshift('file'); + createdTask = jake.createTask.apply(global, args); + jake.currentTaskDescription = null; + return createdTask; + }; + + /** + @name desc + @static + @function + @description Creates a description for a Jake Task (or FileTask, + DirectoryTask). When invoked, the description that iscreated will + be associated with whatever Task is created next. + ` + @param {String} description The description for the Task + */ + this.desc = function (description) { + jake.currentTaskDescription = description; + }; + + /** + @name namespace + @static + @function + @description Creates a namespace which allows logical grouping + of tasks, and prevents name-collisions with task-names. Namespaces + can be nested inside of other namespaces. + ` + @param {String} name The name of the namespace + @param {Function} scope The enclosing scope for the namespaced tasks + + @example + namespace('doc', function () { + task('generate', ['doc:clobber'], function () { + // Generate some docs + }); + + task('clobber', function () { + // Clobber the doc directory first + }); + }); + */ + this.namespace = function (name, closure) { + let curr = jake.currentNamespace; + let ns = curr.childNamespaces[name] || new jake.Namespace(name, curr); + let fn = closure || function () {}; + curr.childNamespaces[name] = ns; + jake.currentNamespace = ns; + fn(); + jake.currentNamespace = curr; + jake.currentTaskDescription = null; + return ns; + }; + + /** + @name complete + @static + @function + @description Completes an asynchronous task, allowing Jake's + execution to proceed to the next task. Calling complete globally or without + arguments completes the last task on the invocationChain. If you use parallel + execution of prereqs this will probably complete a wrong task. You should call this + function with this task as the first argument, before the optional return value. + Alternatively you can call task.complete() + ` + @example + task('generate', ['doc:clobber'], function () { + exec('./generate_docs.sh', function (err, stdout, stderr) { + if (err || stderr) { + fail(err || stderr); + } + else { + console.log(stdout); + complete(); + } + }); + }, {async: true}); + */ + this.complete = function (task, val) { + //this should detect if the first arg is a task, but I guess it should be more thorough + if(task && task. _currentPrereqIndex >=0 ) { + task.complete(val); + } + else { + val = task; + if(jake._invocationChain.length > 0) { + jake._invocationChain[jake._invocationChain.length-1].complete(val); + } + } + }; + + /** + @name fail + @static + @function + @description Causes Jake execution to abort with an error. + Allows passing an optional error code, which will be used to + set the exit-code of exiting process. + ` + @param {Error|String} err The error to thow when aborting execution. + If this argument is an Error object, it will simply be thrown. If + a String, it will be used as the error-message. (If it is a multi-line + String, the first line will be used as the Error message, and the + remaining lines will be used as the error-stack.) + + @example + task('createTests, function () { + if (!fs.existsSync('./tests')) { + fail('Test directory does not exist.'); + } + else { + // Do some testing stuff ... 
+ } + }); + */ + this.fail = function (err, code) { + let msg; + let errObj; + if (code) { + jake.errorCode = code; + } + if (err) { + if (typeof err == 'string') { + // Use the initial or only line of the error as the error-message + // If there was a multi-line error, use the rest as the stack + msg = err.split('\n'); + errObj = new Error(msg.shift()); + if (msg.length) { + errObj.stack = msg.join('\n'); + } + throw errObj; + } + else if (err instanceof Error) { + throw err; + } + else { + throw new Error(err.toString()); + } + } + else { + throw new Error(); + } + }; + + this.packageTask = function (name, version, prereqs, definition) { + return new jake.PackageTask(name, version, prereqs, definition); + }; + + this.publishTask = function (name, prereqs, opts, definition) { + return new jake.PublishTask(name, prereqs, opts, definition); + }; + + // Backward-compat + this.npmPublishTask = function (name, prereqs, opts, definition) { + return new jake.PublishTask(name, prereqs, opts, definition); + }; + + this.testTask = function () { + let ctor = function () {}; + let t; + ctor.prototype = jake.TestTask.prototype; + t = new ctor(); + jake.TestTask.apply(t, arguments); + return t; + }; + + this.setTaskTimeout = function (t) { + this._taskTimeout = t; + }; + + this.setSeriesAutoPrefix = function (prefix) { + this._seriesAutoPrefix = prefix; + }; + + this.series = function (...args) { + let prereqs = args.map((arg) => { + let name = (this._seriesAutoPrefix || '') + arg.name; + jake.task(name, arg); + return name; + }); + let seriesName = uuid(); + let seriesTask = jake.task(seriesName, prereqs); + seriesTask._internal = true; + let res = function () { + return new Promise((resolve) => { + seriesTask.invoke(); + seriesTask.on('complete', (val) => { + resolve(val); + }); + }); + }; + Object.defineProperty(res, 'name', {value: uuid(), + writable: false}); + return res; + }; + +})(); + +module.exports = api; diff --git a/node_modules/jake/lib/jake.js b/node_modules/jake/lib/jake.js new file mode 100644 index 0000000..a463163 --- /dev/null +++ b/node_modules/jake/lib/jake.js @@ -0,0 +1,319 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +if (!global.jake) { + + let EventEmitter = require('events').EventEmitter; + // And so it begins + global.jake = new EventEmitter(); + + let fs = require('fs'); + let chalk = require('chalk'); + let taskNs = require('./task'); + let Task = taskNs.Task; + let FileTask = taskNs.FileTask; + let DirectoryTask = taskNs.DirectoryTask; + let Rule = require('./rule').Rule; + let Namespace = require('./namespace').Namespace; + let RootNamespace = require('./namespace').RootNamespace; + let api = require('./api'); + let utils = require('./utils'); + let Program = require('./program').Program; + let loader = require('./loader')(); + let pkg = JSON.parse(fs.readFileSync(__dirname + '/../package.json').toString()); + + const MAX_RULE_RECURSION_LEVEL = 16; + + // Globalize jake and top-level API methods (e.g., `task`, `desc`) + Object.assign(global, api); + + // Copy utils onto base jake + jake.logger = utils.logger; + jake.exec = utils.exec; + + // File utils should be aliased directly on base jake as well + Object.assign(jake, utils.file); + + // Also add top-level API methods to exported object for those who don't want to + // use the globals (`file` here will overwrite the 'file' utils namespace) + Object.assign(jake, api); + + Object.assign(jake, new (function () { + + this._invocationChain = []; + this._taskTimeout = 30000; + + // Public properties + // ================= + this.version = pkg.version; + // Used when Jake exits with a specific error-code + this.errorCode = null; + // Loads Jakefiles/jakelibdirs + this.loader = loader; + // The root of all ... namespaces + this.rootNamespace = new RootNamespace(); + // Non-namespaced tasks are placed into the default + this.defaultNamespace = this.rootNamespace; + // Start in the default + this.currentNamespace = this.defaultNamespace; + // Saves the description created by a 'desc' call that prefaces a + // 'task' call that defines a task. + this.currentTaskDescription = null; + this.program = new Program(); + this.FileList = require('filelist').FileList; + this.PackageTask = require('./package_task').PackageTask; + this.PublishTask = require('./publish_task').PublishTask; + this.TestTask = require('./test_task').TestTask; + this.Task = Task; + this.FileTask = FileTask; + this.DirectoryTask = DirectoryTask; + this.Namespace = Namespace; + this.Rule = Rule; + + this.parseAllTasks = function () { + let _parseNs = function (ns) { + let nsTasks = ns.tasks; + let nsNamespaces = ns.childNamespaces; + for (let q in nsTasks) { + let nsTask = nsTasks[q]; + jake.Task[nsTask.fullName] = nsTask; + } + for (let p in nsNamespaces) { + let nsNamespace = nsNamespaces[p]; + _parseNs(nsNamespace); + } + }; + _parseNs(jake.defaultNamespace); + }; + + /** + * Displays the list of descriptions avaliable for tasks defined in + * a Jakefile + */ + this.showAllTaskDescriptions = function (f) { + let p; + let maxTaskNameLength = 0; + let task; + let padding; + let name; + let descr; + let filter = typeof f == 'string' ? f : null; + + for (p in jake.Task) { + if (!Object.prototype.hasOwnProperty.call(jake.Task, p)) { + continue; + } + if (filter && p.indexOf(filter) == -1) { + continue; + } + task = jake.Task[p]; + // Record the length of the longest task name -- used for + // pretty alignment of the task descriptions + if (task.description) { + maxTaskNameLength = p.length > maxTaskNameLength ? 
+ p.length : maxTaskNameLength; + } + } + // Print out each entry with descriptions neatly aligned + for (p in jake.Task) { + if (!Object.prototype.hasOwnProperty.call(jake.Task, p)) { + continue; + } + if (filter && p.indexOf(filter) == -1) { + continue; + } + task = jake.Task[p]; + + //name = '\033[32m' + p + '\033[39m '; + name = chalk.green(p); + + descr = task.description; + if (descr) { + descr = chalk.gray('# ' + descr); + + // Create padding-string with calculated length + padding = (new Array(maxTaskNameLength - p.length + 2)).join(' '); + + console.log('jake ' + name + padding + descr); + } + } + }; + + this.createTask = function () { + let args = Array.prototype.slice.call(arguments); + let arg; + let obj; + let task; + let type; + let name; + let action; + let opts = {}; + let prereqs = []; + + type = args.shift(); + + // name, [deps], [action] + // Name (string) + deps (array) format + if (typeof args[0] == 'string') { + name = args.shift(); + if (Array.isArray(args[0])) { + prereqs = args.shift(); + } + } + // name:deps, [action] + // Legacy object-literal syntax, e.g.: {'name': ['depA', 'depB']} + else { + obj = args.shift(); + for (let p in obj) { + prereqs = prereqs.concat(obj[p]); + name = p; + } + } + + // Optional opts/callback or callback/opts + while ((arg = args.shift())) { + if (typeof arg == 'function') { + action = arg; + } + else { + opts = Object.assign(Object.create(null), arg); + } + } + + task = jake.currentNamespace.resolveTask(name); + if (task && !action) { + // Task already exists and no action, just update prereqs, and return it. + task.prereqs = task.prereqs.concat(prereqs); + return task; + } + + switch (type) { + case 'directory': + action = function () { + jake.mkdirP(name); + }; + task = new DirectoryTask(name, prereqs, action, opts); + break; + case 'file': + task = new FileTask(name, prereqs, action, opts); + break; + default: + task = new Task(name, prereqs, action, opts); + } + + jake.currentNamespace.addTask(task); + + if (jake.currentTaskDescription) { + task.description = jake.currentTaskDescription; + jake.currentTaskDescription = null; + } + + // FIXME: Should only need to add a new entry for the current + // task-definition, not reparse the entire structure + jake.parseAllTasks(); + + return task; + }; + + this.attemptRule = function (name, ns, level) { + let prereqRule; + let prereq; + if (level > MAX_RULE_RECURSION_LEVEL) { + return null; + } + // Check Rule + prereqRule = ns.matchRule(name); + if (prereqRule) { + prereq = prereqRule.createTask(name, level); + } + return prereq || null; + }; + + this.createPlaceholderFileTask = function (name, namespace) { + let parsed = name.split(':'); + let filePath = parsed.pop(); // Strip any namespace + let task; + + task = namespace.resolveTask(name); + + // If there's not already an existing dummy FileTask for it, + // create one + if (!task) { + // Create a dummy FileTask only if file actually exists + if (fs.existsSync(filePath)) { + task = new jake.FileTask(filePath); + task.dummy = true; + let ns; + if (parsed.length) { + ns = namespace.resolveNamespace(parsed.join(':')); + } + else { + ns = namespace; + } + if (!namespace) { + throw new Error('Invalid namespace, cannot add FileTask'); + } + ns.addTask(task); + // Put this dummy Task in the global Tasks list so + // modTime will be eval'd correctly + jake.Task[`${ns.path}:${filePath}`] = task; + } + } + + return task || null; + }; + + + this.run = function () { + let args = Array.prototype.slice.call(arguments); + let program = this.program; 
+ let loader = this.loader; + let preempt; + let opts; + + program.parseArgs(args); + program.init(); + + preempt = program.firstPreemptiveOption(); + if (preempt) { + preempt(); + } + else { + opts = program.opts; + // jakefile flag set but no jakefile yet + if (opts.autocomplete && opts.jakefile === true) { + process.stdout.write('no-complete'); + return; + } + // Load Jakefile and jakelibdir files + let jakefileLoaded = loader.loadFile(opts.jakefile); + let jakelibdirLoaded = loader.loadDirectory(opts.jakelibdir); + + if(!jakefileLoaded && !jakelibdirLoaded && !opts.autocomplete) { + fail('No Jakefile. Specify a valid path with -f/--jakefile, ' + + 'or place one in the current directory.'); + } + + program.run(); + } + }; + + })()); +} + +module.exports = jake; diff --git a/node_modules/jake/lib/loader.js b/node_modules/jake/lib/loader.js new file mode 100644 index 0000000..02ad262 --- /dev/null +++ b/node_modules/jake/lib/loader.js @@ -0,0 +1,165 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let path = require('path'); +let fs = require('fs'); +let existsSync = fs.existsSync; +let utils = require('./utils'); + +// Files like jakelib/foobar.jake.js +const JAKELIB_FILE_PAT = /\.jake$|\.js$/; +const SUPPORTED_EXTENSIONS = { + 'js': null, + 'coffee': function () { + try { + let cs = require('coffeescript'); + if (typeof cs.register == 'function') { + cs.register(); + } + } + catch(e) { + throw new Error('You have a CoffeeScript Jakefile, but have not installed CoffeeScript'); + } + }, + 'ls': function () { + try { + require('livescript'); + } + catch (e) { + throw new Error('You have a LiveScript Jakefile, but have not installed LiveScript'); + } + } +}; +const IMPLICIT_JAKEFILE_NAMES = [ + 'Jakefile', + 'Gulpfile' +]; + +let Loader = function () { + // Load a Jakefile, running the code inside -- this may result in + // tasks getting defined using the original Jake API, e.g., + // `task('foo' ['bar', 'baz']);`, or can also auto-create tasks + // from any functions exported from the file + function loadFile(filePath) { + let exported = require(filePath); + for (let [key, value] of Object.entries(exported)) { + let t; + if (typeof value == 'function') { + t = jake.task(key, value); + t.description = '(Exported function)'; + } + } + } + + function fileExists(name) { + let nameWithExt = null; + // Support no file extension as well + let exts = Object.keys(SUPPORTED_EXTENSIONS).concat(['']); + exts.some((ext) => { + let fname = ext ? 
`${name}.${ext}` : name; + if (existsSync(fname)) { + nameWithExt = fname; + return true; + } + }); + return nameWithExt; + } + + // Recursive + function findImplicitJakefile() { + let cwd = process.cwd(); + let names = IMPLICIT_JAKEFILE_NAMES; + let found = null; + names.some((name) => { + let n; + // Prefer all-lowercase + n = name.toLowerCase(); + if ((found = fileExists(n))) { + return found; + } + // Check mixed-case as well + n = name; + if ((found = fileExists(n))) { + return found; + } + }); + if (found) { + return found; + } + else { + process.chdir(".."); + // If we've walked all the way up the directory tree, + // bail out with no result + if (cwd === process.cwd()) { + return null; + } + return findImplicitJakefile(); + } + } + + this.loadFile = function (fileSpecified) { + let jakefile; + let origCwd = process.cwd(); + + if (fileSpecified) { + if (existsSync(fileSpecified)) { + jakefile = fileSpecified; + } + } + else { + jakefile = findImplicitJakefile(); + } + + if (jakefile) { + let ext = jakefile.split('.')[1]; + let loaderFunc = SUPPORTED_EXTENSIONS[ext]; + loaderFunc && loaderFunc(); + + loadFile(utils.file.absolutize(jakefile)); + return true; + } + else { + if (!fileSpecified) { + // Restore the working directory on failure + process.chdir(origCwd); + } + return false; + } + }; + + this.loadDirectory = function (d) { + let dirname = d || 'jakelib'; + let dirlist; + dirname = utils.file.absolutize(dirname); + if (existsSync(dirname)) { + dirlist = fs.readdirSync(dirname); + dirlist.forEach(function (filePath) { + if (JAKELIB_FILE_PAT.test(filePath)) { + loadFile(path.join(dirname, filePath)); + } + }); + return true; + } + return false; + }; + +}; + +module.exports = function () { + return new Loader(); +}; diff --git a/node_modules/jake/lib/namespace.js b/node_modules/jake/lib/namespace.js new file mode 100644 index 0000000..a3c2787 --- /dev/null +++ b/node_modules/jake/lib/namespace.js @@ -0,0 +1,115 @@ +const ROOT_NAMESPACE_NAME = '__rootNamespace__'; + +class Namespace { + constructor(name, parentNamespace) { + this.name = name; + this.parentNamespace = parentNamespace; + this.childNamespaces = {}; + this.tasks = {}; + this.rules = {}; + this.path = this.getPath(); + } + + get fullName() { + return this._getFullName(); + } + + addTask(task) { + this.tasks[task.name] = task; + task.namespace = this; + } + + resolveTask(name) { + if (!name) { + return; + } + + let taskPath = name.split(':'); + let taskName = taskPath.pop(); + let task; + let ns; + + // Namespaced, return either relative to current, or from root + if (taskPath.length) { + taskPath = taskPath.join(':'); + ns = this.resolveNamespace(taskPath) || + Namespace.ROOT_NAMESPACE.resolveNamespace(taskPath); + task = (ns && ns.resolveTask(taskName)); + } + // Bare task, return either local, or top-level + else { + task = this.tasks[name] || Namespace.ROOT_NAMESPACE.tasks[name]; + } + + return task || null; + } + + + resolveNamespace(relativeName) { + if (!relativeName) { + return this; + } + + let parts = relativeName.split(':'); + let ns = this; + + for (let i = 0, ii = parts.length; (ns && i < ii); i++) { + ns = ns.childNamespaces[parts[i]]; + } + + return ns || null; + } + + matchRule(relativeName) { + let parts = relativeName.split(':'); + parts.pop(); + let ns = this.resolveNamespace(parts.join(':')); + let rules = ns ? 
ns.rules : []; + let r; + let match; + + for (let p in rules) { + r = rules[p]; + if (r.match(relativeName)) { + match = r; + } + } + + return (ns && match) || + (this.parentNamespace && + this.parentNamespace.matchRule(relativeName)); + } + + getPath() { + let parts = []; + let next = this.parentNamespace; + while (next) { + parts.push(next.name); + next = next.parentNamespace; + } + parts.pop(); // Remove '__rootNamespace__' + return parts.reverse().join(':'); + } + + _getFullName() { + let path = this.path; + path = (path && path.split(':')) || []; + path.push(this.name); + return path.join(':'); + } + + isRootNamespace() { + return !this.parentNamespace; + } +} + +class RootNamespace extends Namespace { + constructor() { + super(ROOT_NAMESPACE_NAME, null); + Namespace.ROOT_NAMESPACE = this; + } +} + +module.exports.Namespace = Namespace; +module.exports.RootNamespace = RootNamespace; + diff --git a/node_modules/jake/lib/package_task.js b/node_modules/jake/lib/package_task.js new file mode 100644 index 0000000..527aca7 --- /dev/null +++ b/node_modules/jake/lib/package_task.js @@ -0,0 +1,406 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let path = require('path'); +let fs = require('fs'); +let exec = require('child_process').exec; +let FileList = require('filelist').FileList; + +/** + @name jake + @namespace jake +*/ +/** + @name jake.PackageTask + @constructor + @description Instantiating a PackageTask creates a number of Jake + Tasks that make packaging and distributing your software easy. + + @param {String} name The name of the project + @param {String} version The current project version (will be + appended to the project-name in the package-archive + @param {Function} definition Defines the contents of the package, + and format of the package-archive. Will be executed on the instantiated + PackageTask (i.e., 'this', will be the PackageTask instance), + to set the various instance-propertiess. 
+ + @example + let t = new jake.PackageTask('rous', 'v' + version, function () { + let files = [ + 'Capfile' + , 'Jakefile' + , 'README.md' + , 'package.json' + , 'app/*' + , 'bin/*' + , 'config/*' + , 'lib/*' + , 'node_modules/*' + ]; + this.packageFiles.include(files); + this.packageFiles.exclude('node_modules/foobar'); + this.needTarGz = true; + }); + + */ +let PackageTask = function () { + let args = Array.prototype.slice.call(arguments); + let name = args.shift(); + let version = args.shift(); + let definition = args.pop(); + let prereqs = args.pop() || []; // Optional + + prereqs = [].concat(prereqs); // Accept string or list + + /** + @name jake.PackageTask#name + @public + @type {String} + @description The name of the project + */ + this.name = name; + /** + @name jake.PackageTask#version + @public + @type {String} + @description The project version-string + */ + this.version = version; + /** + @name jake.PackageTask#prereqs + @public + @type {Array} + @description Tasks to run before packaging + */ + this.prereqs = prereqs; + /** + @name jake.PackageTask#packageDir + @public + @type {String='pkg'} + @description The directory-name to use for packaging the software + */ + this.packageDir = 'pkg'; + /** + @name jake.PackageTask#packageFiles + @public + @type {jake.FileList} + @description The list of files and directories to include in the + package-archive + */ + this.packageFiles = new FileList(); + /** + @name jake.PackageTask#needTar + @public + @type {Boolean=false} + @description If set to true, uses the `tar` utility to create + a gzip .tgz archive of the package + */ + this.needTar = false; + /** + @name jake.PackageTask#needTarGz + @public + @type {Boolean=false} + @description If set to true, uses the `tar` utility to create + a gzip .tar.gz archive of the package + */ + this.needTarGz = false; + /** + @name jake.PackageTask#needTarBz2 + @public + @type {Boolean=false} + @description If set to true, uses the `tar` utility to create + a bzip2 .bz2 archive of the package + */ + this.needTarBz2 = false; + /** + @name jake.PackageTask#needJar + @public + @type {Boolean=false} + @description If set to true, uses the `jar` utility to create + a .jar archive of the package + */ + this.needJar = false; + /** + @name jake.PackageTask#needZip + @public + @type {Boolean=false} + @description If set to true, uses the `zip` utility to create + a .zip archive of the package + */ + this.needZip = false; + /** + @name jake.PackageTask#manifestFile + @public + @type {String=null} + @description Can be set to point the `jar` utility at a manifest + file to use in a .jar archive. If unset, one will be automatically + created by the `jar` utility. This path should be relative to the + root of the package directory (this.packageDir above, likely 'pkg') + */ + this.manifestFile = null; + /** + @name jake.PackageTask#tarCommand + @public + @type {String='tar'} + @description The shell-command to use for creating tar archives. + */ + this.tarCommand = 'tar'; + /** + @name jake.PackageTask#jarCommand + @public + @type {String='jar'} + @description The shell-command to use for creating jar archives. + */ + this.jarCommand = 'jar'; + /** + @name jake.PackageTask#zipCommand + @public + @type {String='zip'} + @description The shell-command to use for creating zip archives. 
+ */ + this.zipCommand = 'zip'; + /** + @name jake.PackageTask#archiveNoBaseDir + @public + @type {Boolean=false} + @description Simple option for performing the archive on the + contents of the directory instead of the directory itself + */ + this.archiveNoBaseDir = false; + /** + @name jake.PackageTask#archiveChangeDir + @public + @type {String=null} + @description Equivalent to the '-C' command for the `tar` and `jar` + commands. ("Change to this directory before adding files.") + */ + this.archiveChangeDir = null; + /** + @name jake.PackageTask#archiveContentDir + @public + @type {String=null} + @description Specifies the files and directories to include in the + package-archive. If unset, this will default to the main package + directory -- i.e., name + version. + */ + this.archiveContentDir = null; + + if (typeof definition == 'function') { + definition.call(this); + } + this.define(); +}; + +PackageTask.prototype = new (function () { + + let _compressOpts = { + Tar: { + ext: '.tgz', + flags: 'czf', + cmd: 'tar' + }, + TarGz: { + ext: '.tar.gz', + flags: 'czf', + cmd: 'tar' + }, + TarBz2: { + ext: '.tar.bz2', + flags: 'cjf', + cmd: 'tar' + }, + Jar: { + ext: '.jar', + flags: 'cf', + cmd: 'jar' + }, + Zip: { + ext: '.zip', + flags: 'qr', + cmd: 'zip' + } + }; + + this.define = function () { + let self = this; + let packageDirPath = this.packageDirPath(); + let compressTaskArr = []; + + desc('Build the package for distribution'); + task('package', self.prereqs.concat(['clobberPackage', 'buildPackage'])); + // Backward-compat alias + task('repackage', ['package']); + + task('clobberPackage', function () { + jake.rmRf(self.packageDir, {silent: true}); + }); + + desc('Remove the package'); + task('clobber', ['clobberPackage']); + + let doCommand = function (p) { + let filename = path.resolve(self.packageDir + '/' + self.packageName() + + _compressOpts[p].ext); + if (process.platform == 'win32') { + // Windows full path may have drive letter, which is going to cause + // namespace problems, so strip it. 
+ if (filename.length > 2 && filename[1] == ':') { + filename = filename.substr(2); + } + } + compressTaskArr.push(filename); + + file(filename, [packageDirPath], function () { + let cmd; + let opts = _compressOpts[p]; + // Directory to move to when doing the compression-task + // Changes in the case of zip for emulating -C option + let chdir = self.packageDir; + // Save the current dir so it's possible to pop back up + // after compressing + let currDir = process.cwd(); + let archiveChangeDir; + let archiveContentDir; + + if (self.archiveNoBaseDir) { + archiveChangeDir = self.packageName(); + archiveContentDir = '.'; + } + else { + archiveChangeDir = self.archiveChangeDir; + archiveContentDir = self.archiveContentDir; + } + + cmd = self[opts.cmd + 'Command']; + cmd += ' -' + opts.flags; + if (opts.cmd == 'jar' && self.manifestFile) { + cmd += 'm'; + } + + // The name of the archive to create -- use full path + // so compression can be performed from a different dir + // if needed + cmd += ' ' + filename; + + if (opts.cmd == 'jar' && self.manifestFile) { + cmd += ' ' + self.manifestFile; + } + + // Where to perform the compression -- -C option isn't + // supported in zip, so actually do process.chdir for this + if (archiveChangeDir) { + if (opts.cmd == 'zip') { + chdir = path.join(chdir, archiveChangeDir); + } + else { + cmd += ' -C ' + archiveChangeDir; + } + } + + // Where to get the archive content + if (archiveContentDir) { + cmd += ' ' + archiveContentDir; + } + else { + cmd += ' ' + self.packageName(); + } + + // Move into the desired dir (usually packageDir) to compress + // Return back up to the current dir after the exec + process.chdir(chdir); + + exec(cmd, function (err, stdout, stderr) { + if (err) { throw err; } + + // Return back up to the starting directory (see above, + // before exec) + process.chdir(currDir); + + complete(); + }); + }, {async: true}); + }; + + for (let p in _compressOpts) { + if (this['need' + p]) { + doCommand(p); + } + } + + task('buildPackage', compressTaskArr, function () {}); + + directory(this.packageDir); + + file(packageDirPath, this.packageFiles, function () { + jake.mkdirP(packageDirPath); + let fileList = []; + self.packageFiles.forEach(function (name) { + let f = path.join(self.packageDirPath(), name); + let fDir = path.dirname(f); + jake.mkdirP(fDir, {silent: true}); + + // Add both files and directories + fileList.push({ + from: name, + to: f + }); + }); + let _copyFile = function () { + let file = fileList.pop(); + let stat; + if (file) { + stat = fs.statSync(file.from); + // Target is a directory, just create it + if (stat.isDirectory()) { + jake.mkdirP(file.to, {silent: true}); + _copyFile(); + } + // Otherwise copy the file + else { + jake.cpR(file.from, file.to, {silent: true}); + _copyFile(); + } + } + else { + complete(); + } + }; + _copyFile(); + }, {async: true}); + + + }; + + this.packageName = function () { + if (this.version) { + return this.name + '-' + this.version; + } + else { + return this.name; + } + }; + + this.packageDirPath = function () { + return this.packageDir + '/' + this.packageName(); + }; + +})(); + +jake.PackageTask = PackageTask; +exports.PackageTask = PackageTask; + diff --git a/node_modules/jake/lib/parseargs.js b/node_modules/jake/lib/parseargs.js new file mode 100644 index 0000000..1bd24c9 --- /dev/null +++ b/node_modules/jake/lib/parseargs.js @@ -0,0 +1,134 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let parseargs = {}; +let isOpt = function (arg) { return arg.indexOf('-') === 0 }; +let removeOptPrefix = function (opt) { return opt.replace(/^--/, '').replace(/^-/, '') }; + +/** + * @constructor + * Parses a list of command-line args into a key/value object of + * options and an array of positional commands. + * @ param {Array} opts A list of options in the following format: + * [{full: 'foo', abbr: 'f'}, {full: 'bar', abbr: 'b'}]] + */ +parseargs.Parser = function (opts) { + // A key/value object of matching options parsed out of the args + this.opts = {}; + this.taskNames = null; + this.envVars = null; + + // Data structures used for parsing + this.reg = opts; + this.shortOpts = {}; + this.longOpts = {}; + + let self = this; + [].forEach.call(opts, function (item) { + self.shortOpts[item.abbr] = item; + self.longOpts[item.full] = item; + }); +}; + +parseargs.Parser.prototype = new function () { + + let _trueOrNextVal = function (argParts, args) { + if (argParts[1]) { + return argParts[1]; + } + else { + return (!args[0] || isOpt(args[0])) ? + true : args.shift(); + } + }; + + /** + * Parses an array of arguments into options and positional commands + * @param {Array} args The command-line args to parse + */ + this.parse = function (args) { + let cmds = []; + let cmd; + let envVars = {}; + let opts = {}; + let arg; + let argItem; + let argParts; + let cmdItems; + let taskNames = []; + let preempt; + + while (args.length) { + arg = args.shift(); + + if (isOpt(arg)) { + arg = removeOptPrefix(arg); + argParts = arg.split('='); + argItem = this.longOpts[argParts[0]] || this.shortOpts[argParts[0]]; + if (argItem) { + // First-encountered preemptive opt takes precedence -- no further opts + // or possibility of ambiguity, so just look for a value, or set to + // true and then bail + if (argItem.preempts) { + opts[argItem.full] = _trueOrNextVal(argParts, args); + preempt = true; + break; + } + // If the opt requires a value, see if we can get a value from the + // next arg, or infer true from no-arg -- if it's followed by another + // opt, throw an error + if (argItem.expectValue || argItem.allowValue) { + opts[argItem.full] = _trueOrNextVal(argParts, args); + if (argItem.expectValue && !opts[argItem.full]) { + throw new Error(argItem.full + ' option expects a value.'); + } + } + else { + opts[argItem.full] = true; + } + } + } + else { + cmds.unshift(arg); + } + } + + if (!preempt) { + // Parse out any env-vars and task-name + while ((cmd = cmds.pop())) { + cmdItems = cmd.split('='); + if (cmdItems.length > 1) { + envVars[cmdItems[0]] = cmdItems[1]; + } + else { + taskNames.push(cmd); + } + } + + } + + return { + opts: opts, + envVars: envVars, + taskNames: taskNames + }; + }; + +}; + +module.exports = parseargs; diff --git a/node_modules/jake/lib/program.js b/node_modules/jake/lib/program.js new file mode 100644 index 0000000..121632f --- /dev/null +++ b/node_modules/jake/lib/program.js @@ -0,0 +1,282 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse 
(mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let fs = require('fs'); +let parseargs = require('./parseargs'); +let utils = require('./utils'); +let Program; +let usage = require('fs').readFileSync(`${__dirname}/../usage.txt`).toString(); +let { Task } = require('./task/task'); + +function die(msg) { + console.log(msg); + process.stdout.write('', function () { + process.stderr.write('', function () { + process.exit(); + }); + }); +} + +let preempts = { + version: function () { + die(jake.version); + }, + help: function () { + die(usage); + } +}; + +let AVAILABLE_OPTS = [ + { full: 'jakefile', + abbr: 'f', + expectValue: true + }, + { full: 'quiet', + abbr: 'q', + expectValue: false + }, + { full: 'directory', + abbr: 'C', + expectValue: true + }, + { full: 'always-make', + abbr: 'B', + expectValue: false + }, + { full: 'tasks', + abbr: 'T', + expectValue: false, + allowValue: true + }, + // Alias t + { full: 'tasks', + abbr: 't', + expectValue: false, + allowValue: true + }, + // Alias ls + { full: 'tasks', + abbr: 'ls', + expectValue: false, + allowValue: true + }, + { full: 'help', + abbr: 'h', + }, + { full: 'version', + abbr: 'V', + }, + // Alias lowercase v + { full: 'version', + abbr: 'v', + }, + { full: 'jakelibdir', + abbr: 'J', + expectValue: true + }, + { full: 'allow-rejection', + abbr: 'ar', + expectValue: false + } +]; + +Program = function () { + this.availableOpts = AVAILABLE_OPTS; + this.opts = {}; + this.taskNames = null; + this.taskArgs = null; + this.envVars = null; + this.die = die; +}; + +Program.prototype = new (function () { + + this.handleErr = function (err) { + if (jake.listeners('error').length !== 0) { + jake.emit('error', err); + return; + } + + if (jake.listeners('error').length) { + jake.emit('error', err); + return; + } + + utils.logger.error('jake aborted.'); + if (err.stack) { + utils.logger.error(err.stack); + } + else { + utils.logger.error(err.message); + } + + process.stdout.write('', function () { + process.stderr.write('', function () { + jake.errorCode = jake.errorCode || 1; + process.exit(jake.errorCode); + }); + }); + }; + + this.parseArgs = function (args) { + let result = (new parseargs.Parser(this.availableOpts)).parse(args); + this.setOpts(result.opts); + this.setTaskNames(result.taskNames); + this.setEnvVars(result.envVars); + }; + + this.setOpts = function (options) { + let opts = options || {}; + Object.assign(this.opts, opts); + }; + + this.internalOpts = function (options) { + this.availableOpts = this.availableOpts.concat(options); + }; + + this.autocompletions = function (cur) { + let p; let i; let task; + let commonPrefix = ''; + let matches = []; + + for (p in jake.Task) { + task = jake.Task[p]; + if ( + 'fullName' in task + && ( + // if empty string, program converts to true + cur === true || + task.fullName.indexOf(cur) === 0 + ) + ) { + if (matches.length === 0) { + commonPrefix = task.fullName; + } + else { + for (i = commonPrefix.length; i > -1; --i) { + commonPrefix = commonPrefix.substr(0, 
i); + if (task.fullName.indexOf(commonPrefix) === 0) { + break; + } + } + } + matches.push(task.fullName); + } + } + + if (matches.length > 1 && commonPrefix === cur) { + matches.unshift('yes-space'); + } + else { + matches.unshift('no-space'); + } + + process.stdout.write(matches.join(' ')); + }; + + this.setTaskNames = function (names) { + if (names && !Array.isArray(names)) { + throw new Error('Task names must be an array'); + } + this.taskNames = (names && names.length) ? names : ['default']; + }; + + this.setEnvVars = function (vars) { + this.envVars = vars || null; + }; + + this.firstPreemptiveOption = function () { + let opts = this.opts; + for (let p in opts) { + if (preempts[p]) { + return preempts[p]; + } + } + return false; + }; + + this.init = function (configuration) { + let self = this; + let config = configuration || {}; + if (config.options) { + this.setOpts(config.options); + } + if (config.taskNames) { + this.setTaskNames(config.taskNames); + } + if (config.envVars) { + this.setEnvVars(config.envVars); + } + process.addListener('uncaughtException', function (err) { + self.handleErr(err); + }); + if (!this.opts['allow-rejection']) { + process.addListener('unhandledRejection', (reason, promise) => { + utils.logger.error('Unhandled rejection at:', promise, 'reason:', reason); + self.handleErr(reason); + }); + } + if (this.envVars) { + Object.assign(process.env, this.envVars); + } + }; + + this.run = function () { + let rootTask; + let taskNames; + let dirname; + let opts = this.opts; + + if (opts.autocomplete) { + return this.autocompletions(opts['autocomplete-cur'], opts['autocomplete-prev']); + } + // Run with `jake -T`, just show descriptions + if (opts.tasks) { + return jake.showAllTaskDescriptions(opts.tasks); + } + + taskNames = this.taskNames; + if (!(Array.isArray(taskNames) && taskNames.length)) { + throw new Error('Please pass jake.runTasks an array of task-names'); + } + + // Set working dir + dirname = opts.directory; + if (dirname) { + if (fs.existsSync(dirname) && + fs.statSync(dirname).isDirectory()) { + process.chdir(dirname); + } + else { + throw new Error(dirname + ' is not a valid directory path'); + } + } + + rootTask = task(Task.ROOT_TASK_NAME, taskNames, function () {}); + rootTask._internal = true; + + rootTask.once('complete', function () { + jake.emit('complete'); + }); + jake.emit('start'); + rootTask.invoke(); + }; + +})(); + +module.exports.Program = Program; diff --git a/node_modules/jake/lib/publish_task.js b/node_modules/jake/lib/publish_task.js new file mode 100644 index 0000000..f0cacfd --- /dev/null +++ b/node_modules/jake/lib/publish_task.js @@ -0,0 +1,290 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +let fs = require('fs'); +let path = require('path'); +let exec = require('child_process').execSync; +let FileList = require('filelist').FileList; + +let PublishTask = function () { + let args = Array.prototype.slice.call(arguments).filter(function (item) { + return typeof item != 'undefined'; + }); + let arg; + let opts = {}; + let definition; + let prereqs = []; + let createDef = function (arg) { + return function () { + this.packageFiles.include(arg); + }; + }; + + this.name = args.shift(); + + // Old API, just name + list of files + if (args.length == 1 && (Array.isArray(args[0]) || typeof args[0] == 'string')) { + definition = createDef(args.pop()); + } + // Current API, name + [prereqs] + [opts] + definition + else { + while ((arg = args.pop())) { + // Definition func + if (typeof arg == 'function') { + definition = arg; + } + // Prereqs + else if (Array.isArray(arg) || typeof arg == 'string') { + prereqs = arg; + } + // Opts + else { + opts = arg; + } + } + } + + this.prereqs = prereqs; + this.packageFiles = new FileList(); + this.publishCmd = opts.publishCmd || 'npm publish %filename'; + this.publishMessage = opts.publishMessage || 'BOOM! Published.'; + this.gitCmd = opts.gitCmd || 'git'; + this.versionFiles = opts.versionFiles || ['package.json']; + this.scheduleDelay = 5000; + + // Override utility funcs for testing + this._ensureRepoClean = function (stdout) { + if (stdout.length) { + fail(new Error('Git repository is not clean.')); + } + }; + this._getCurrentBranch = function (stdout) { + return String(stdout).trim(); + }; + + if (typeof definition == 'function') { + definition.call(this); + } + this.define(); +}; + + +PublishTask.prototype = new (function () { + + let _currentBranch = null; + + let getPackage = function () { + let pkg = JSON.parse(fs.readFileSync(path.join(process.cwd(), + '/package.json')).toString()); + return pkg; + }; + let getPackageVersionNumber = function () { + return getPackage().version; + }; + + this.define = function () { + let self = this; + + namespace('publish', function () { + task('fetchTags', function () { + // Make sure local tags are up to date + exec(self.gitCmd + ' fetch --tags'); + console.log('Fetched remote tags.'); + }); + + task('getCurrentBranch', function () { + // Figure out what branch to push to + let stdout = exec(self.gitCmd + ' symbolic-ref --short HEAD').toString(); + if (!stdout) { + throw new Error('No current Git branch found'); + } + _currentBranch = self._getCurrentBranch(stdout); + console.log('On branch ' + _currentBranch); + }); + + task('ensureClean', function () { + // Only bump, push, and tag if the Git repo is clean + let stdout = exec(self.gitCmd + ' status --porcelain --untracked-files=no').toString(); + // Throw if there's output + self._ensureRepoClean(stdout); + }); + + task('updateVersionFiles', function () { + let pkg; + let version; + let arr; + let patch; + + // Grab the current version-string + pkg = getPackage(); + version = pkg.version; + // Increment the patch-number for the version + arr = version.split('.'); + patch = parseInt(arr.pop(), 10) + 1; + arr.push(patch); + version = arr.join('.'); + + // Update package.json or other files with the new version-info + self.versionFiles.forEach(function (file) { + let p = path.join(process.cwd(), file); + let data = JSON.parse(fs.readFileSync(p).toString()); + data.version = version; + fs.writeFileSync(p, JSON.stringify(data, true, 2) + '\n'); + }); + // Return the version string so that listeners for the 'complete' event + // for this task can 
use it (e.g., to update other files before pushing + // to Git) + return version; + }); + + task('pushVersion', ['ensureClean', 'updateVersionFiles'], function () { + let version = getPackageVersionNumber(); + let message = 'Version ' + version; + let cmds = [ + self.gitCmd + ' commit -a -m "' + message + '"', + self.gitCmd + ' push origin ' + _currentBranch, + self.gitCmd + ' tag -a v' + version + ' -m "' + message + '"', + self.gitCmd + ' push --tags' + ]; + cmds.forEach((cmd) => { + exec(cmd); + }); + version = getPackageVersionNumber(); + console.log('Bumped version number to v' + version + '.'); + }); + + let defineTask = task('definePackage', function () { + let version = getPackageVersionNumber(); + new jake.PackageTask(self.name, 'v' + version, self.prereqs, function () { + // Replace the PackageTask's FileList with the PublishTask's FileList + this.packageFiles = self.packageFiles; + this.needTarGz = true; // Default to tar.gz + // If any of the need<CompressionFormat> or archive opts are set + // proxy them to the PackageTask + for (let p in this) { + if (p.indexOf('need') === 0 || p.indexOf('archive') === 0) { + if (typeof self[p] != 'undefined') { + this[p] = self[p]; + } + } + } + }); + }); + defineTask._internal = true; + + task('package', function () { + let definePack = jake.Task['publish:definePackage']; + let pack = jake.Task['package']; + let version = getPackageVersionNumber(); + + // May have already been run + if (definePack.taskStatus == jake.Task.runStatuses.DONE) { + definePack.reenable(true); + } + definePack.invoke(); + // Set manually, completion happens in next tick, creating deadlock + definePack.taskStatus = jake.Task.runStatuses.DONE; + pack.invoke(); + console.log('Created package for ' + self.name + ' v' + version); + }); + + task('publish', function () { + return new Promise((resolve) => { + let version = getPackageVersionNumber(); + let filename; + let cmd; + + console.log('Publishing ' + self.name + ' v' + version); + + if (typeof self.createPublishCommand == 'function') { + cmd = self.createPublishCommand(version); + } + else { + filename = './pkg/' + self.name + '-v' + version + '.tar.gz'; + cmd = self.publishCmd.replace(/%filename/gi, filename); + } + + if (typeof cmd == 'function') { + cmd(function (err) { + if (err) { + throw err; + } + console.log(self.publishMessage); + resolve(); + }); + } + else { + // Hackity hack -- NPM publish sometimes returns errror like: + // Error sending version data\nnpm ERR! 
+ // Error: forbidden 0.2.4 is modified, should match modified time + setTimeout(function () { + let stdout = exec(cmd).toString() || ''; + stdout = stdout.trim(); + if (stdout) { + console.log(stdout); + } + console.log(self.publishMessage); + resolve(); + }, self.scheduleDelay); + } + }); + }); + + task('cleanup', function () { + return new Promise((resolve) => { + let clobber = jake.Task.clobber; + clobber.reenable(true); + clobber.on('complete', function () { + console.log('Cleaned up package'); + resolve(); + }); + clobber.invoke(); + }); + }); + + }); + + let prefixNs = function (item) { + return 'publish:' + item; + }; + + // Create aliases in the default namespace + desc('Create a new version and release.'); + task('publish', self.prereqs.concat(['version', 'release'] + .map(prefixNs))); + + desc('Release the existing version.'); + task('publishExisting', self.prereqs.concat(['release'] + .map(prefixNs))); + + task('version', ['fetchTags', 'getCurrentBranch', 'pushVersion'] + .map(prefixNs)); + + task('release', ['package', 'publish', 'cleanup'] + .map(prefixNs)); + + // Invoke proactively so there will be a callable 'package' task + // which can be used apart from 'publish' + jake.Task['publish:definePackage'].invoke(); + }; + +})(); + +jake.PublishTask = PublishTask; +exports.PublishTask = PublishTask; + diff --git a/node_modules/jake/lib/rule.js b/node_modules/jake/lib/rule.js new file mode 100644 index 0000000..25f51ae --- /dev/null +++ b/node_modules/jake/lib/rule.js @@ -0,0 +1,311 @@ +let path = require('path'); +let fs = require('fs'); +let Task = require('./task/task').Task; + +// Split a task to two parts, name space and task name. +// For example, given 'foo:bin/a%.c', return an object with +// - 'ns' : foo +// - 'name' : bin/a%.c +function splitNs(task) { + let parts = task.split(':'); + let name = parts.pop(); + let ns = resolveNs(parts); + return { + 'name' : name, + 'ns' : ns + }; +} + +// Return the namespace based on an array of names. +// For example, given ['foo', 'baz' ], return the namespace +// +// default -> foo -> baz +// +// where default is the global root namespace +// and -> means child namespace. +function resolveNs(parts) { + let ns = jake.defaultNamespace; + for(let i = 0, l = parts.length; ns && i < l; i++) { + ns = ns.childNamespaces[parts[i]]; + } + return ns; +} + +// Given a pattern p, say 'foo:bin/a%.c' +// Return an object with +// - 'ns' : foo +// - 'dir' : bin +// - 'prefix' : a +// - 'suffix' : .c +function resolve(p) { + let task = splitNs(p); + let name = task.name; + let ns = task.ns; + let split = path.basename(name).split('%'); + return { + ns: ns, + dir: path.dirname(name), + prefix: split[0], + suffix: split[1] + }; +} + +// Test whether string a is a suffix of string b +function stringEndWith(a, b) { + let l; + return (l = b.lastIndexOf(a)) == -1 ? false : l + a.length == b.length; +} + +// Replace the suffix a of the string s with b. +// Note that, it is assumed a is a suffix of s. 
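// [Editor's note] Illustrative sketch only, not part of the jake source: the two
// suffix helpers above and below are what let a rule such as rule('%.o', '%.c', ...)
// map an objective back to its source file, e.g. (hypothetical values):
//
//   stringEndWith('.o', 'main.o');              // => true
//   stringReplaceSuffix('main.o', '.o', '.c');  // => 'main.c'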
+function stringReplaceSuffix(s, a, b) { + return s.slice(0, s.lastIndexOf(a)) + b; +} + +class Rule { + constructor(opts) { + this.pattern = opts.pattern; + this.source = opts.source; + this.prereqs = opts.prereqs; + this.action = opts.action; + this.opts = opts.opts; + this.desc = opts.desc; + this.ns = opts.ns; + } + + // Create a file task based on this rule for the specified + // task-name + // ====== + // FIXME: Right now this just throws away any passed-in args + // for the synthsized task (taskArgs param) + // ====== + createTask(fullName, level) { + let self = this; + let pattern; + let source; + let action; + let opts; + let prereqs; + let valid; + let src; + let tNs; + let createdTask; + let name = Task.getBaseTaskName(fullName); + let nsPath = Task.getBaseNamespacePath(fullName); + let ns = this.ns.resolveNamespace(nsPath); + + pattern = this.pattern; + source = this.source; + + if (typeof source == 'string') { + src = Rule.getSource(name, pattern, source); + } + else { + src = source(name); + } + + // TODO: Write a utility function that appends a + // taskname to a namespace path + src = nsPath.split(':').filter(function (item) { + return !!item; + }).concat(src).join(':'); + + // Generate the prerequisite for the matching task. + // It is the original prerequisites plus the prerequisite + // representing source file, i.e., + // + // rule( '%.o', '%.c', ['some.h'] ... + // + // If the objective is main.o, then new task should be + // + // file( 'main.o', ['main.c', 'some.h' ] ... + prereqs = this.prereqs.slice(); // Get a copy to work with + prereqs.unshift(src); + + // Prereq should be: + // 1. an existing task + // 2. an existing file on disk + // 3. a valid rule (i.e., not at too deep a level) + valid = prereqs.some(function (p) { + let ns = self.ns; + return ns.resolveTask(p) || + fs.existsSync(Task.getBaseTaskName(p)) || + jake.attemptRule(p, ns, level + 1); + }); + + // If any of the prereqs aren't valid, the rule isn't valid + if (!valid) { + return null; + } + // Otherwise, hunky-dory, finish creating the task for the rule + else { + // Create the action for the task + action = function () { + let task = this; + self.action.apply(task); + }; + + opts = this.opts; + + // Insert the file task into Jake + // + // Since createTask function stores the task as a child task + // of currentNamespace. Here we temporariliy switch the namespace. + // FIXME: Should allow optional ns passed in instead of this hack + tNs = jake.currentNamespace; + jake.currentNamespace = ns; + createdTask = jake.createTask('file', name, prereqs, action, opts); + createdTask.source = src.split(':').pop(); + jake.currentNamespace = tNs; + + return createdTask; + } + } + + match(name) { + return Rule.match(this.pattern, name); + } + + // Test wether the a prerequisite matchs the pattern. + // The arg 'pattern' does not have namespace as prefix. + // For example, the following tests are true + // + // pattern | name + // bin/%.o | bin/main.o + // bin/%.o | foo:bin/main.o + // + // The following tests are false (trivally) + // + // pattern | name + // bin/%.o | foobin/main.o + // bin/%.o | bin/main.oo + static match(pattern, name) { + let p; + let task; + let obj; + let filename; + + if (pattern instanceof RegExp) { + return pattern.test(name); + } + else if (pattern.indexOf('%') == -1) { + // No Pattern. No Folder. No Namespace. + // A Simple Suffix Rule. 
Just test suffix + return stringEndWith(pattern, name); + } + else { + // Resolve the dir, prefix and suffix of pattern + p = resolve(pattern); + + // Resolve the namespace and task-name + task = splitNs(name); + name = task.name; + + // Set the objective as the task-name + obj = name; + + // Namespace is already matched. + + // Check dir + if (path.dirname(obj) != p.dir) { + return false; + } + + filename = path.basename(obj); + + // Check file name length + if ((p.prefix.length + p.suffix.length + 1) > filename.length) { + // Length does not match. + return false; + } + + // Check prefix + if (filename.indexOf(p.prefix) !== 0) { + return false; + } + + // Check suffix + if (!stringEndWith(p.suffix, filename)) { + return false; + } + + // OK. Find a match. + return true; + } + } + + // Generate the source based on + // - name name for the synthesized task + // - pattern pattern for the objective + // - source pattern for the source + // + // Return the source with properties + // - dep the prerequisite of source + // (with the namespace) + // + // - file the file name of source + // (without the namespace) + // + // For example, given + // + // - name foo:bin/main.o + // - pattern bin/%.o + // - source src/%.c + // + // return 'foo:src/main.c', + // + static getSource(name, pattern, source) { + let dep; + let pat; + let match; + let file; + let src; + + // Regex pattern -- use to look up the extension + if (pattern instanceof RegExp) { + match = pattern.exec(name); + if (match) { + if (typeof source == 'function') { + src = source(name); + } + else { + src = stringReplaceSuffix(name, match[0], source); + } + } + } + // Assume string + else { + // Simple string suffix replacement + if (pattern.indexOf('%') == -1) { + if (typeof source == 'function') { + src = source(name); + } + else { + src = stringReplaceSuffix(name, pattern, source); + } + } + // Percent-based substitution + else { + pat = pattern.replace('%', '(.*?)'); + pat = new RegExp(pat); + match = pat.exec(name); + if (match) { + if (typeof source == 'function') { + src = source(name); + } + else { + file = match[1]; + file = source.replace('%', file); + dep = match[0]; + src = name.replace(dep, file); + } + } + } + } + + return src; + } +} + + +exports.Rule = Rule; diff --git a/node_modules/jake/lib/task/directory_task.js b/node_modules/jake/lib/task/directory_task.js new file mode 100644 index 0000000..b17b624 --- /dev/null +++ b/node_modules/jake/lib/task/directory_task.js @@ -0,0 +1,30 @@ +let fs = require('fs'); +let FileTask = require('./file_task').FileTask; + +/** + @name jake + @namespace jake +*/ +/** + @name jake.DirectoryTask + @constructor + @augments EventEmitter + @augments jake.Task + @augments jake.FileTask + @description A Jake DirectoryTask + + @param {String} name The name of the directory to create. 
+ */ +class DirectoryTask extends FileTask { + constructor(...args) { + super(...args); + if (fs.existsSync(this.name)) { + this.updateModTime(); + } + else { + this.modTime = null; + } + } +} + +exports.DirectoryTask = DirectoryTask; diff --git a/node_modules/jake/lib/task/file_task.js b/node_modules/jake/lib/task/file_task.js new file mode 100644 index 0000000..6fad84b --- /dev/null +++ b/node_modules/jake/lib/task/file_task.js @@ -0,0 +1,124 @@ +let fs = require('fs'); +let Task = require('./task').Task; + +function isFileOrDirectory(t) { + return (t instanceof FileTask || + t instanceof DirectoryTask); +} + +function isFile(t) { + return (t instanceof FileTask && !(t instanceof DirectoryTask)); +} + +/** + @name jake + @namespace jake +*/ +/** + @name jake.FileTask + @class` + @extentds Task + @description A Jake FileTask + + @param {String} name The name of the Task + @param {Array} [prereqs] Prerequisites to be run before this task + @param {Function} [action] The action to perform to create this file + @param {Object} [opts] + @param {Array} [opts.asyc=false] Perform this task asynchronously. + If you flag a task with this option, you must call the global + `complete` method inside the task's action, for execution to proceed + to the next task. + */ +class FileTask extends Task { + constructor(...args) { + super(...args); + this.dummy = false; + if (fs.existsSync(this.name)) { + this.updateModTime(); + } + else { + this.modTime = null; + } + } + + isNeeded() { + let prereqs = this.prereqs; + let prereqName; + let prereqTask; + + // No repeatsies + if (this.taskStatus == Task.runStatuses.DONE) { + return false; + } + // The always-make override + else if (jake.program.opts['always-make']) { + return true; + } + // Default case + else { + + // We need either an existing file, or an action to create one. + // First try grabbing the actual mod-time of the file + try { + this.updateModTime(); + } + // Then fall back to looking for an action + catch(e) { + if (typeof this.action == 'function') { + return true; + } + else { + throw new Error('File-task ' + this.fullName + ' has no ' + + 'existing file, and no action to create one.'); + } + } + + // Compare mod-time of all the prereqs with its mod-time + // If any prereqs are newer, need to run the action to update + if (prereqs && prereqs.length) { + for (let i = 0, ii = prereqs.length; i < ii; i++) { + prereqName = prereqs[i]; + prereqTask = this.namespace.resolveTask(prereqName) || + jake.createPlaceholderFileTask(prereqName, this.namespace); + // Run the action if: + // 1. The prereq is a normal task (not file/dir) + // 2. 
The prereq is a file-task with a mod-date more recent than + // the one for this file/dir + if (prereqTask) { + if (!isFileOrDirectory(prereqTask) || + (isFile(prereqTask) && prereqTask.modTime > this.modTime)) { + return true; + } + } + } + } + // File/dir has no prereqs, and exists -- no need to run + else { + // Effectively done + this.taskStatus = Task.runStatuses.DONE; + return false; + } + } + } + + updateModTime() { + let stats = fs.statSync(this.name); + this.modTime = stats.mtime; + } + + complete() { + if (!this.dummy) { + this.updateModTime(); + } + // Hackity hack + Task.prototype.complete.apply(this, arguments); + } + +} + +exports.FileTask = FileTask; + +// DirectoryTask is a subclass of FileTask, depends on it +// being defined +let DirectoryTask = require('./directory_task').DirectoryTask; + diff --git a/node_modules/jake/lib/task/index.js b/node_modules/jake/lib/task/index.js new file mode 100644 index 0000000..bc93f41 --- /dev/null +++ b/node_modules/jake/lib/task/index.js @@ -0,0 +1,9 @@ + +let Task = require('./task').Task; +let FileTask = require('./file_task').FileTask; +let DirectoryTask = require('./directory_task').DirectoryTask; + +exports.Task = Task; +exports.FileTask = FileTask; +exports.DirectoryTask = DirectoryTask; + diff --git a/node_modules/jake/lib/task/task.js b/node_modules/jake/lib/task/task.js new file mode 100644 index 0000000..9e8886f --- /dev/null +++ b/node_modules/jake/lib/task/task.js @@ -0,0 +1,439 @@ +let EventEmitter = require('events').EventEmitter; +let async = require('async'); +let chalk = require('chalk'); +// 'rule' module is required at the bottom because circular deps + +// Used for task value, so better not to use +// null, since value should be unset/uninitialized +let UNDEFINED_VALUE; + +const ROOT_TASK_NAME = '__rootTask__'; +const POLLING_INTERVAL = 100; + +// Parse any positional args attached to the task-name +function parsePrereqName(name) { + let taskArr = name.split('['); + let taskName = taskArr[0]; + let taskArgs = []; + if (taskArr[1]) { + taskArgs = taskArr[1].replace(/\]$/, ''); + taskArgs = taskArgs.split(','); + } + return { + name: taskName, + args: taskArgs + }; +} + +/** + @name jake.Task + @class + @extends EventEmitter + @description A Jake Task + + @param {String} name The name of the Task + @param {Array} [prereqs] Prerequisites to be run before this task + @param {Function} [action] The action to perform for this task + @param {Object} [opts] + @param {Array} [opts.asyc=false] Perform this task asynchronously. + If you flag a task with this option, you must call the global + `complete` method inside the task's action, for execution to proceed + to the next task. + */ +class Task extends EventEmitter { + + constructor(name, prereqs, action, options) { + // EventEmitter ctor takes no args + super(); + + if (name.indexOf(':') > -1) { + throw new Error('Task name cannot include a colon. 
It is used internally as namespace delimiter.'); + } + let opts = options || {}; + + this._currentPrereqIndex = 0; + this._internal = false; + this._skipped = false; + + this.name = name; + this.prereqs = prereqs; + this.action = action; + this.async = false; + this.taskStatus = Task.runStatuses.UNSTARTED; + this.description = null; + this.args = []; + this.value = UNDEFINED_VALUE; + this.concurrency = 1; + this.startTime = null; + this.endTime = null; + this.directory = null; + this.namespace = null; + + // Support legacy async-flag -- if not explicitly passed or falsy, will + // be set to empty-object + if (typeof opts == 'boolean' && opts === true) { + this.async = true; + } + else { + if (opts.async) { + this.async = true; + } + if (opts.concurrency) { + this.concurrency = opts.concurrency; + } + } + + //Do a test on self dependencies for this task + if(Array.isArray(this.prereqs) && this.prereqs.indexOf(this.name) !== -1) { + throw new Error("Cannot use prereq " + this.name + " as a dependency of itself"); + } + } + + get fullName() { + return this._getFullName(); + } + + _initInvocationChain() { + // Legacy global invocation chain + jake._invocationChain.push(this); + + // New root chain + if (!this._invocationChain) { + this._invocationChainRoot = true; + this._invocationChain = []; + if (jake.currentRunningTask) { + jake.currentRunningTask._waitForChains = jake.currentRunningTask._waitForChains || []; + jake.currentRunningTask._waitForChains.push(this._invocationChain); + } + } + } + + /** + @name jake.Task#invoke + @function + @description Runs prerequisites, then this task. If the task has already + been run, will not run the task again. + */ + invoke() { + this._initInvocationChain(); + + this.args = Array.prototype.slice.call(arguments); + this.reenabled = false + this.runPrereqs(); + } + + /** + @name jake.Task#execute + @function + @description Run only this task, without prereqs. If the task has already + been run, *will* run the task again. + */ + execute() { + this._initInvocationChain(); + + this.args = Array.prototype.slice.call(arguments); + this.reenable(); + this.reenabled = true + this.run(); + } + + runPrereqs() { + if (this.prereqs && this.prereqs.length) { + + if (this.concurrency > 1) { + async.eachLimit(this.prereqs, this.concurrency, + + (name, cb) => { + let parsed = parsePrereqName(name); + + let prereq = this.namespace.resolveTask(parsed.name) || + jake.attemptRule(name, this.namespace, 0) || + jake.createPlaceholderFileTask(name, this.namespace); + + if (!prereq) { + throw new Error('Unknown task "' + name + '"'); + } + + //Test for circular invocation + if(prereq === this) { + setImmediate(function () { + cb(new Error("Cannot use prereq " + prereq.name + " as a dependency of itself")); + }); + } + + if (prereq.taskStatus == Task.runStatuses.DONE) { + //prereq already done, return + setImmediate(cb); + } + else { + //wait for complete before calling cb + prereq.once('_done', () => { + prereq.removeAllListeners('_done'); + setImmediate(cb); + }); + // Start the prereq if we are the first to encounter it + if (prereq.taskStatus === Task.runStatuses.UNSTARTED) { + prereq.taskStatus = Task.runStatuses.STARTED; + prereq.invoke.apply(prereq, parsed.args); + } + } + }, + + (err) => { + //async callback is called after all prereqs have run. 
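// [Editor's note] Illustrative usage sketch, not part of the jake source: the
// concurrency branch above is what a Jakefile opts into by passing a
// `concurrency` option (task names below are hypothetical):
//
//   task('build', ['lint', 'compile', 'bundle'], { concurrency: 2 }, function () {
//     console.log('prereqs ran, at most 2 at a time');
//   });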
+ if (err) { + throw err; + } + else { + setImmediate(this.run.bind(this)); + } + } + ); + } + else { + setImmediate(this.nextPrereq.bind(this)); + } + } + else { + setImmediate(this.run.bind(this)); + } + } + + nextPrereq() { + let self = this; + let index = this._currentPrereqIndex; + let name = this.prereqs[index]; + let prereq; + let parsed; + + if (name) { + + parsed = parsePrereqName(name); + + prereq = this.namespace.resolveTask(parsed.name) || + jake.attemptRule(name, this.namespace, 0) || + jake.createPlaceholderFileTask(name, this.namespace); + + if (!prereq) { + throw new Error('Unknown task "' + name + '"'); + } + + // Do when done + if (prereq.taskStatus == Task.runStatuses.DONE) { + self.handlePrereqDone(prereq); + } + else { + prereq.once('_done', () => { + this.handlePrereqDone(prereq); + prereq.removeAllListeners('_done'); + }); + if (prereq.taskStatus == Task.runStatuses.UNSTARTED) { + prereq.taskStatus = Task.runStatuses.STARTED; + prereq._invocationChain = this._invocationChain; + prereq.invoke.apply(prereq, parsed.args); + } + } + } + } + + /** + @name jake.Task#reenable + @function + @description Reenables a task so that it can be run again. + */ + reenable(deep) { + let prereqs; + let prereq; + this._skipped = false; + this.taskStatus = Task.runStatuses.UNSTARTED; + this.value = UNDEFINED_VALUE; + if (deep && this.prereqs) { + prereqs = this.prereqs; + for (let i = 0, ii = prereqs.length; i < ii; i++) { + prereq = jake.Task[prereqs[i]]; + if (prereq) { + prereq.reenable(deep); + } + } + } + } + + handlePrereqDone(prereq) { + this._currentPrereqIndex++; + if (this._currentPrereqIndex < this.prereqs.length) { + setImmediate(this.nextPrereq.bind(this)); + } + else { + setImmediate(this.run.bind(this)); + } + } + + isNeeded() { + let needed = true; + if (this.taskStatus == Task.runStatuses.DONE) { + needed = false; + } + return needed; + } + + run() { + let val, previous; + let hasAction = typeof this.action == 'function'; + + if (!this.isNeeded()) { + this.emit('skip'); + this.emit('_done'); + } + else { + if (this._invocationChain.length) { + previous = this._invocationChain[this._invocationChain.length - 1]; + // If this task is repeating and its previous is equal to this, don't check its status because it was set to UNSTARTED by the reenable() method + if (!(this.reenabled && previous == this)) { + if (previous.taskStatus != Task.runStatuses.DONE) { + let now = (new Date()).getTime(); + if (now - this.startTime > jake._taskTimeout) { + return jake.fail(`Timed out waiting for task: ${previous.name} with status of ${previous.taskStatus}`); + } + setTimeout(this.run.bind(this), POLLING_INTERVAL); + return; + } + } + } + if (!(this.reenabled && previous == this)) { + this._invocationChain.push(this); + } + + if (!(this._internal || jake.program.opts.quiet)) { + console.log("Starting '" + chalk.green(this.fullName) + "'..."); + } + + this.startTime = (new Date()).getTime(); + this.emit('start'); + + jake.currentRunningTask = this; + + if (hasAction) { + try { + if (this.directory) { + process.chdir(this.directory); + } + + val = this.action.apply(this, this.args); + + if (typeof val == 'object' && typeof val.then == 'function') { + this.async = true; + + val.then( + (result) => { + setImmediate(() => { + this.complete(result); + }); + }, + (err) => { + setImmediate(() => { + this.errorOut(err); + }); + }); + } + } + catch (err) { + this.errorOut(err); + return; // Bail out, not complete + } + } + + if (!(hasAction && this.async)) { + setImmediate(() => { + 
this.complete(val); + }); + } + } + } + + errorOut(err) { + this.taskStatus = Task.runStatuses.ERROR; + this._invocationChain.chainStatus = Task.runStatuses.ERROR; + this.emit('error', err); + } + + complete(val) { + + if (Array.isArray(this._waitForChains)) { + let stillWaiting = this._waitForChains.some((chain) => { + return !(chain.chainStatus == Task.runStatuses.DONE || + chain.chainStatus == Task.runStatuses.ERROR); + }); + if (stillWaiting) { + let now = (new Date()).getTime(); + let elapsed = now - this.startTime; + if (elapsed > jake._taskTimeout) { + return jake.fail(`Timed out waiting for task: ${this.name} with status of ${this.taskStatus}. Elapsed: ${elapsed}`); + } + setTimeout(() => { + this.complete(val); + }, POLLING_INTERVAL); + return; + } + } + + jake._invocationChain.splice(jake._invocationChain.indexOf(this), 1); + + if (this._invocationChainRoot) { + this._invocationChain.chainStatus = Task.runStatuses.DONE; + } + + this._currentPrereqIndex = 0; + + // If 'complete' getting called because task has been + // run already, value will not be passed -- leave in place + if (!this._skipped) { + this.taskStatus = Task.runStatuses.DONE; + this.value = val; + + this.emit('complete', this.value); + this.emit('_done'); + + this.endTime = (new Date()).getTime(); + let taskTime = this.endTime - this.startTime; + + if (!(this._internal || jake.program.opts.quiet)) { + console.log("Finished '" + chalk.green(this.fullName) + "' after " + chalk.magenta(taskTime + ' ms')); + } + + } + } + + _getFullName() { + let ns = this.namespace; + let path = (ns && ns.path) || ''; + path = (path && path.split(':')) || []; + if (this.namespace !== jake.defaultNamespace) { + path.push(this.namespace.name); + } + path.push(this.name); + return path.join(':'); + } + + static getBaseNamespacePath(fullName) { + return fullName.split(':').slice(0, -1).join(':'); + } + + static getBaseTaskName(fullName) { + return fullName.split(':').pop(); + } +} + +Task.runStatuses = { + UNSTARTED: 'unstarted', + DONE: 'done', + STARTED: 'started', + ERROR: 'error' +}; + +Task.ROOT_TASK_NAME = ROOT_TASK_NAME; + +exports.Task = Task; + +// Required here because circular deps +require('../rule'); + diff --git a/node_modules/jake/lib/test_task.js b/node_modules/jake/lib/test_task.js new file mode 100644 index 0000000..6482bf1 --- /dev/null +++ b/node_modules/jake/lib/test_task.js @@ -0,0 +1,270 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let path = require('path'); +let currDir = process.cwd(); + +/** + @name jake + @namespace jake +*/ +/** + @name jake.TestTask + @constructor + @description Instantiating a TestTask creates a number of Jake + Tasks that make running tests for your software easy. + + @param {String} name The name of the project + @param {Function} definition Defines the list of files containing the tests, + and the name of the namespace/task for running them. 
Will be executed on the + instantiated TestTask (i.e., 'this', will be the TestTask instance), to set + the various instance-propertiess. + + @example + let t = new jake.TestTask('bij-js', function () { + this.testName = 'testSpecial'; + this.testFiles.include('test/**'); + }); + + */ +let TestTask = function () { + let self = this; + let args = Array.prototype.slice.call(arguments); + let name = args.shift(); + let definition = args.pop(); + let prereqs = args.pop() || []; + + /** + @name jake.TestTask#testNam + @public + @type {String} + @description The name of the namespace to place the tests in, and + the top-level task for running tests. Defaults to "test" + */ + this.testName = 'test'; + + /** + @name jake.TestTask#testFiles + @public + @type {jake.FileList} + @description The list of files containing tests to load + */ + this.testFiles = new jake.FileList(); + + /** + @name jake.TestTask#showDescription + @public + @type {Boolean} + @description Show the created task when doing Jake -T + */ + this.showDescription = true; + + /* + @name jake.TestTask#totalTests + @public + @type {Number} + @description The total number of tests to run + */ + this.totalTests = 0; + + /* + @name jake.TestTask#executedTests + @public + @type {Number} + @description The number of tests successfully run + */ + this.executedTests = 0; + + if (typeof definition == 'function') { + definition.call(this); + } + + if (this.showDescription) { + desc('Run the tests for ' + name); + } + + task(this.testName, prereqs, {async: true}, function () { + let t = jake.Task[this.fullName + ':run']; + t.on('complete', function () { + complete(); + }); + // Pass args to the namespaced test + t.invoke.apply(t, arguments); + }); + + namespace(self.testName, function () { + + let runTask = task('run', {async: true}, function (pat) { + let re; + let testFiles; + + // Don't nest; make a top-level namespace. Don't want + // re-calling from inside to nest infinitely + jake.currentNamespace = jake.defaultNamespace; + + re = new RegExp(pat); + // Get test files that match the passed-in pattern + testFiles = self.testFiles.toArray() + .filter(function (f) { + return (re).test(f); + }) // Don't load the same file multiple times -- should this be in FileList? + .reduce(function (p, c) { + if (p.indexOf(c) < 0) { + p.push(c); + } + return p; + }, []); + + // Create a namespace for all the testing tasks to live in + namespace(self.testName + 'Exec', function () { + // Each test will be a prereq for the dummy top-level task + let prereqs = []; + // Continuation to pass to the async tests, wrapping `continune` + let next = function () { + complete(); + }; + // Create the task for this test-function + let createTask = function (name, action) { + // If the test-function is defined with a continuation + // param, flag the task as async + let t; + let isAsync = !!action.length; + + // Define the actual namespaced task with the name, the + // wrapped action, and the correc async-flag + t = task(name, createAction(name, action), { + async: isAsync + }); + t.once('complete', function () { + self.executedTests++; + }); + t._internal = true; + return t; + }; + // Used as the action for the defined task for each test. 
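// [Editor's note] Illustrative sketch, not part of the jake source: the wrapper
// created below passes a continuation only when the exported test function
// declares a parameter, so a hypothetical test file would look like:
//
//   // test/example.js (hypothetical)
//   exports.testSync  = function () { /* runs synchronously */ };
//   exports.testAsync = function (next) { setTimeout(next, 10); };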
+ let createAction = function (n, a) { + // A wrapped function that passes in the `next` function + // for any tasks that run asynchronously + return function () { + let cb; + if (a.length) { + cb = next; + } + if (!(n == 'before' || n == 'after' || + /_beforeEach$/.test(n) || /_afterEach$/.test(n))) { + jake.logger.log(n); + } + // 'this' will be the task when action is run + return a.call(this, cb); + }; + }; + // Dummy top-level task for everything to be prereqs for + let topLevel; + + // Pull in each test-file, and iterate over any exported + // test-functions. Register each test-function as a prereq task + testFiles.forEach(function (file) { + let exp = require(path.join(currDir, file)); + + // Create a namespace for each filename, so test-name collisions + // won't be a problem + namespace(file, function () { + let testPrefix = self.testName + 'Exec:' + file + ':'; + let testName; + // Dummy task for displaying file banner + testName = '*** Running ' + file + ' ***'; + prereqs.push(testPrefix + testName); + createTask(testName, function () {}); + + // 'before' setup + if (typeof exp.before == 'function') { + prereqs.push(testPrefix + 'before'); + // Create the task + createTask('before', exp.before); + } + + // Walk each exported function, and create a task for each + for (let p in exp) { + if (p == 'before' || p == 'after' || + p == 'beforeEach' || p == 'afterEach') { + continue; + } + + if (typeof exp.beforeEach == 'function') { + prereqs.push(testPrefix + p + '_beforeEach'); + // Create the task + createTask(p + '_beforeEach', exp.beforeEach); + } + + // Add the namespace:name of this test to the list of prereqs + // for the dummy top-level task + prereqs.push(testPrefix + p); + // Create the task + createTask(p, exp[p]); + + if (typeof exp.afterEach == 'function') { + prereqs.push(testPrefix + p + '_afterEach'); + // Create the task + createTask(p + '_afterEach', exp.afterEach); + } + } + + // 'after' teardown + if (typeof exp.after == 'function') { + prereqs.push(testPrefix + 'after'); + // Create the task + let afterTask = createTask('after', exp.after); + afterTask._internal = true; + } + + }); + }); + + self.totalTests = prereqs.length; + process.on('exit', function () { + // Throw in the case where the process exits without + // finishing tests, but no error was thrown + if (!jake.errorCode && (self.totalTests > self.executedTests)) { + throw new Error('Process exited without all tests completing.'); + } + }); + + // Create the dummy top-level task. When calling a task internally + // with `invoke` that is async (or has async prereqs), have to listen + // for the 'complete' event to know when it's done + topLevel = task('__top__', prereqs); + topLevel._internal = true; + topLevel.addListener('complete', function () { + jake.logger.log('All tests ran successfully'); + complete(); + }); + + topLevel.invoke(); // Do the thing! + }); + + }); + runTask._internal = true; + + }); + + +}; + +jake.TestTask = TestTask; +exports.TestTask = TestTask; + diff --git a/node_modules/jake/lib/utils/file.js b/node_modules/jake/lib/utils/file.js new file mode 100644 index 0000000..a436def --- /dev/null +++ b/node_modules/jake/lib/utils/file.js @@ -0,0 +1,286 @@ +/* + * Utilities: A classic collection of JavaScript utilities + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +let fs = require('fs'); +let path = require('path'); + +/** + @name file + @namespace file +*/ + +let fileUtils = new (function () { + + // Recursively copy files and directories + let _copyFile = function (fromPath, toPath, opts) { + let from = path.normalize(fromPath) + let to = path.normalize(toPath) + let options = opts || {} + let fromStat; + let toStat; + let destExists; + let destDoesNotExistErr; + let content; + let filename; + let dirContents; + let targetDir; + + fromStat = fs.statSync(from); + + try { + //console.dir(to + ' destExists'); + toStat = fs.statSync(to); + destExists = true; + } + catch(e) { + //console.dir(to + ' does not exist'); + destDoesNotExistErr = e; + destExists = false; + } + // Destination dir or file exists, copy into (directory) + // or overwrite (file) + if (destExists) { + + // If there's a rename-via-copy file/dir name passed, use it. + // Otherwise use the actual file/dir name + filename = options.rename || path.basename(from); + + // Copying a directory + if (fromStat.isDirectory()) { + dirContents = fs.readdirSync(from); + targetDir = path.join(to, filename); + // We don't care if the target dir already exists + try { + fs.mkdirSync(targetDir, {mode: fromStat.mode & 0o777}); + } + catch(e) { + if (e.code !== 'EEXIST') { + throw e; + } + } + for (let i = 0, ii = dirContents.length; i < ii; i++) { + _copyFile(path.join(from, dirContents[i]), targetDir, {preserveMode: options.preserveMode}); + } + } + // Copying a file + else { + content = fs.readFileSync(from); + let mode = fromStat.mode & 0o777; + let targetFile = to; + + if (toStat.isDirectory()) { + targetFile = path.join(to, filename); + } + + let fileExists = fs.existsSync(targetFile); + fs.writeFileSync(targetFile, content); + + // If the file didn't already exist, use the original file mode. + // Otherwise, only update the mode if preserverMode is true. + if(!fileExists || options.preserveMode) { + fs.chmodSync(targetFile, mode); + } + } + } + // Dest doesn't exist, can't create it + else { + throw destDoesNotExistErr; + } + }; + + // Remove the given directory + let _rmDir = function (dirPath) { + let dir = path.normalize(dirPath); + let paths = []; + paths = fs.readdirSync(dir); + paths.forEach(function (p) { + let curr = path.join(dir, p); + let stat = fs.lstatSync(curr); + if (stat.isDirectory()) { + _rmDir(curr); + } + else { + try { + fs.unlinkSync(curr); + } catch(e) { + if (e.code === 'EPERM') { + fs.chmodSync(curr, parseInt(666, 8)); + fs.unlinkSync(curr); + } else { + throw e; + } + } + } + }); + fs.rmdirSync(dir); + }; + + /** + @name file#cpR + @public + @function + @description Copies a directory/file to a destination + @param {String} fromPath The source path to copy from + @param {String} toPath The destination path to copy to + @param {Object} opts Options to use + @param {Boolean} [opts.preserveMode] If target file already exists, this + determines whether the original file's mode is copied over. The default of + false mimics the behavior of the `cp` command line tool. 
(Default: false) + */ + this.cpR = function (fromPath, toPath, options) { + let from = path.normalize(fromPath); + let to = path.normalize(toPath); + let toStat; + let doesNotExistErr; + let filename; + let opts = options || {}; + + if (from == to) { + throw new Error('Cannot copy ' + from + ' to itself.'); + } + + // Handle rename-via-copy + try { + toStat = fs.statSync(to); + } + catch(e) { + doesNotExistErr = e; + + // Get abs path so it's possible to check parent dir + if (!this.isAbsolute(to)) { + to = path.join(process.cwd(), to); + } + + // Save the file/dir name + filename = path.basename(to); + // See if a parent dir exists, so there's a place to put the + /// renamed file/dir (resets the destination for the copy) + to = path.dirname(to); + try { + toStat = fs.statSync(to); + } + catch(e) {} + if (toStat && toStat.isDirectory()) { + // Set the rename opt to pass to the copy func, will be used + // as the new file/dir name + opts.rename = filename; + //console.log('filename ' + filename); + } + else { + throw doesNotExistErr; + } + } + + _copyFile(from, to, opts); + }; + + /** + @name file#mkdirP + @public + @function + @description Create the given directory(ies) using the given mode permissions + @param {String} dir The directory to create + @param {Number} mode The mode to give the created directory(ies)(Default: 0755) + */ + this.mkdirP = function (dir, mode) { + let dirPath = path.normalize(dir); + let paths = dirPath.split(/\/|\\/); + let currPath = ''; + let next; + + if (paths[0] == '' || /^[A-Za-z]+:/.test(paths[0])) { + currPath = paths.shift() || '/'; + currPath = path.join(currPath, paths.shift()); + //console.log('basedir'); + } + while ((next = paths.shift())) { + if (next == '..') { + currPath = path.join(currPath, next); + continue; + } + currPath = path.join(currPath, next); + try { + //console.log('making ' + currPath); + fs.mkdirSync(currPath, mode || parseInt(755, 8)); + } + catch(e) { + if (e.code != 'EEXIST') { + throw e; + } + } + } + }; + + /** + @name file#rmRf + @public + @function + @description Deletes the given directory/file + @param {String} p The path to delete, can be a directory or file + */ + this.rmRf = function (p, options) { + let stat; + try { + stat = fs.lstatSync(p); + if (stat.isDirectory()) { + _rmDir(p); + } + else { + fs.unlinkSync(p); + } + } + catch (e) {} + }; + + /** + @name file#isAbsolute + @public + @function + @return {Boolean/String} If it's absolute the first character is returned otherwise false + @description Checks if a given path is absolute or relative + @param {String} p Path to check + */ + this.isAbsolute = function (p) { + let match = /^[A-Za-z]+:\\|^\//.exec(p); + if (match && match.length) { + return match[0]; + } + return false; + }; + + /** + @name file#absolutize + @public + @function + @return {String} Returns the absolute path for the given path + @description Returns the absolute path for the given path + @param {String} p The path to get the absolute path for + */ + this.absolutize = function (p) { + if (this.isAbsolute(p)) { + return p; + } + else { + return path.join(process.cwd(), p); + } + }; + +})(); + +module.exports = fileUtils; + diff --git a/node_modules/jake/lib/utils/index.js b/node_modules/jake/lib/utils/index.js new file mode 100644 index 0000000..17d686b --- /dev/null +++ b/node_modules/jake/lib/utils/index.js @@ -0,0 +1,297 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + + +let util = require('util'); // Native Node util module +let spawn = require('child_process').spawn; +let EventEmitter = require('events').EventEmitter; +let logger = require('./logger'); +let file = require('./file'); +let Exec; + +const _UUID_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split(''); + +let parseArgs = function (argumentsObj) { + let args; + let arg; + let cmds; + let callback; + let opts = { + interactive: false, + printStdout: false, + printStderr: false, + breakOnError: true + }; + + args = Array.prototype.slice.call(argumentsObj); + + cmds = args.shift(); + // Arrayize if passed a single string command + if (typeof cmds == 'string') { + cmds = [cmds]; + } + // Make a copy if it's an actual list + else { + cmds = cmds.slice(); + } + + // Get optional callback or opts + while((arg = args.shift())) { + if (typeof arg == 'function') { + callback = arg; + } + else if (typeof arg == 'object') { + opts = Object.assign(opts, arg); + } + } + + // Backward-compat shim + if (typeof opts.stdout != 'undefined') { + opts.printStdout = opts.stdout; + delete opts.stdout; + } + if (typeof opts.stderr != 'undefined') { + opts.printStderr = opts.stderr; + delete opts.stderr; + } + + return { + cmds: cmds, + opts: opts, + callback: callback + }; +}; + +/** + @name jake + @namespace jake +*/ +let utils = new (function () { + /** + @name jake.exec + @static + @function + @description Executes shell-commands asynchronously with an optional + final callback. + ` + @param {String[]} cmds The list of shell-commands to execute + @param {Object} [opts] + @param {Boolean} [opts.printStdout=false] Print stdout from each command + @param {Boolean} [opts.printStderr=false] Print stderr from each command + @param {Boolean} [opts.breakOnError=true] Stop further execution on + the first error. + @param {Boolean} [opts.windowsVerbatimArguments=false] Don't translate + arguments on Windows. 
+ @param {Function} [callback] Callback to run after executing the + commands + + @example + let cmds = [ + 'echo "showing directories"' + , 'ls -al | grep ^d' + , 'echo "moving up a directory"' + , 'cd ../' + ] + , callback = function () { + console.log('Finished running commands.'); + } + jake.exec(cmds, {stdout: true}, callback); + */ + this.exec = function (a, b, c) { + let parsed = parseArgs(arguments); + let cmds = parsed.cmds; + let opts = parsed.opts; + let callback = parsed.callback; + + let ex = new Exec(cmds, opts, callback); + + ex.addListener('error', function (msg, code) { + if (opts.breakOnError) { + fail(msg, code); + } + }); + ex.run(); + + return ex; + }; + + this.createExec = function (a, b, c) { + return new Exec(a, b, c); + }; + + // From Math.uuid.js, https://github.com/broofa/node-uuid + // Robert Kieffer (robert@broofa.com), MIT license + this.uuid = function (length, radix) { + var chars = _UUID_CHARS + , uuid = [] + , r + , i; + + radix = radix || chars.length; + + if (length) { + // Compact form + i = -1; + while (++i < length) { + uuid[i] = chars[0 | Math.random()*radix]; + } + } else { + // rfc4122, version 4 form + + // rfc4122 requires these characters + uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-'; + uuid[14] = '4'; + + // Fill in random data. At i==19 set the high bits of clock sequence as + // per rfc4122, sec. 4.1.5 + i = -1; + while (++i < 36) { + if (!uuid[i]) { + r = 0 | Math.random()*16; + uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r]; + } + } + } + + return uuid.join(''); + }; + +})(); + +Exec = function () { + let parsed = parseArgs(arguments); + let cmds = parsed.cmds; + let opts = parsed.opts; + let callback = parsed.callback; + + this._cmds = cmds; + this._callback = callback; + this._config = opts; +}; + +util.inherits(Exec, EventEmitter); + +Object.assign(Exec.prototype, new (function () { + + let _run = function () { + let self = this; + let sh; + let cmd; + let args; + let next = this._cmds.shift(); + let config = this._config; + let errData = ''; + let shStdio; + let handleStdoutData = function (data) { + self.emit('stdout', data); + }; + let handleStderrData = function (data) { + let d = data.toString(); + self.emit('stderr', data); + // Accumulate the error-data so we can use it as the + // stack if the process exits with an error + errData += d; + }; + + // Keep running as long as there are commands in the array + if (next) { + let spawnOpts = {}; + this.emit('cmdStart', next); + + // Ganking part of Node's child_process.exec to get cmdline args parsed + if (process.platform == 'win32') { + cmd = 'cmd'; + args = ['/c', next]; + if (config.windowsVerbatimArguments) { + spawnOpts.windowsVerbatimArguments = true; + } + } + else { + cmd = '/bin/sh'; + args = ['-c', next]; + } + + if (config.interactive) { + spawnOpts.stdio = 'inherit'; + sh = spawn(cmd, args, spawnOpts); + } + else { + shStdio = [ + process.stdin + ]; + if (config.printStdout) { + shStdio.push(process.stdout); + } + else { + shStdio.push('pipe'); + } + if (config.printStderr) { + shStdio.push(process.stderr); + } + else { + shStdio.push('pipe'); + } + spawnOpts.stdio = shStdio; + sh = spawn(cmd, args, spawnOpts); + if (!config.printStdout) { + sh.stdout.addListener('data', handleStdoutData); + } + if (!config.printStderr) { + sh.stderr.addListener('data', handleStderrData); + } + } + + // Exit, handle err or run next + sh.on('exit', function (code) { + let msg; + if (code !== 0) { + msg = errData || 'Process exited with error.'; + msg = msg.trim(); + self.emit('error', 
msg, code); + } + if (code === 0 || !config.breakOnError) { + self.emit('cmdEnd', next); + setTimeout(function () { _run.call(self); }, 0); + } + }); + + } + else { + self.emit('end'); + if (typeof self._callback == 'function') { + self._callback(); + } + } + }; + + this.append = function (cmd) { + this._cmds.push(cmd); + }; + + this.run = function () { + _run.call(this); + }; + +})()); + +utils.Exec = Exec; +utils.file = file; +utils.logger = logger; + +module.exports = utils; + diff --git a/node_modules/jake/lib/utils/logger.js b/node_modules/jake/lib/utils/logger.js new file mode 100644 index 0000000..8f72686 --- /dev/null +++ b/node_modules/jake/lib/utils/logger.js @@ -0,0 +1,24 @@ +let util = require('util'); + +let logger = new (function () { + let _output = function (type, out) { + let quiet = typeof jake != 'undefined' && jake.program && + jake.program.opts && jake.program.opts.quiet; + let msg; + if (!quiet) { + msg = typeof out == 'string' ? out : util.inspect(out); + console[type](msg); + } + }; + + this.log = function (out) { + _output('log', out); + }; + + this.error = function (out) { + _output('error', out); + }; + +})(); + +module.exports = logger; diff --git a/node_modules/jake/package.json b/node_modules/jake/package.json new file mode 100644 index 0000000..8c4a961 --- /dev/null +++ b/node_modules/jake/package.json @@ -0,0 +1,42 @@ +{ + "name": "jake", + "description": "JavaScript build tool, similar to Make or Rake", + "keywords": [ + "build", + "cli", + "make", + "rake" + ], + "version": "10.8.2", + "author": "Matthew Eernisse <mde@fleegix.org> (http://fleegix.org)", + "license": "Apache-2.0", + "bin": { + "jake": "./bin/cli.js" + }, + "main": "./lib/jake.js", + "scripts": { + "lint": "eslint --format codeframe \"lib/**/*.js\" \"test/**/*.js\"", + "lint:fix": "eslint --fix \"lib/**/*.js\" \"test/**/*.js\"", + "test": "./bin/cli.js test", + "test:ci": "npm run lint && npm run test" + }, + "repository": { + "type": "git", + "url": "git://github.com/jakejs/jake.git" + }, + "preferGlobal": true, + "dependencies": { + "filelist": "^1.0.1", + "minimatch": "^3.0.4", + "chalk": "^2.4.2", + "async": "0.9.x" + }, + "devDependencies": { + "eslint": "^6.8.0", + "mocha": "^7.1.1", + "q": "^1.5.1" + }, + "engines": { + "node": "*" + } +} diff --git a/node_modules/jake/test/integration/concurrent.js b/node_modules/jake/test/integration/concurrent.js new file mode 100644 index 0000000..4ae41e8 --- /dev/null +++ b/node_modules/jake/test/integration/concurrent.js @@ -0,0 +1,42 @@ +let assert = require('assert'); +let exec = require('child_process').execSync; + +suite('concurrent', function () { + + this.timeout(7000); + + test(' simple concurrent prerequisites 1', function () { + let out = exec('./node_modules/.bin/jake -q concurrent:simple1').toString().trim() + assert.equal('Started A\nStarted B\nFinished B\nFinished A', out); + }); + + test(' simple concurrent prerequisites 2', function () { + let out = exec('./node_modules/.bin/jake -q concurrent:simple2').toString().trim() + assert.equal('Started C\nStarted D\nFinished C\nFinished D', out); + }); + + test(' sequential concurrent prerequisites', function () { + let out = exec('./node_modules/.bin/jake -q concurrent:seqconcurrent').toString().trim() + assert.equal('Started A\nStarted B\nFinished B\nFinished A\nStarted C\nStarted D\nFinished C\nFinished D', out); + }); + + test(' concurrent concurrent prerequisites', function () { + let out = exec('./node_modules/.bin/jake -q concurrent:concurrentconcurrent').toString().trim() + 
assert.equal('Started A\nStarted B\nStarted C\nStarted D\nFinished B\nFinished C\nFinished A\nFinished D', out); + }); + + test(' concurrent prerequisites with subdependency', function () { + let out = exec('./node_modules/.bin/jake -q concurrent:subdep').toString().trim() + assert.equal('Started A\nFinished A\nStarted Ba\nFinished Ba', out); + }); + + test(' failing in concurrent prerequisites', function () { + try { + exec('./node_modules/.bin/jake -q concurrent:Cfail'); + } + catch(err) { + assert(err.message.indexOf('Command failed') > -1); + } + }); + +}); diff --git a/node_modules/jake/test/integration/file.js b/node_modules/jake/test/integration/file.js new file mode 100644 index 0000000..97ed0d6 --- /dev/null +++ b/node_modules/jake/test/integration/file.js @@ -0,0 +1,228 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let assert = require('assert'); +let fs = require('fs'); +let path = require('path'); +let file = require(`${PROJECT_DIR}/lib/utils/file`); +let existsSync = fs.existsSync || path.existsSync; +let exec = require('child_process').execSync; + +suite('fileUtils', function () { + + test('mkdirP', function () { + let expected = [ + ['foo'], + ['foo', 'bar'], + ['foo', 'bar', 'baz'], + ['foo', 'bar', 'baz', 'qux'] + ]; + file.mkdirP('foo/bar/baz/qux'); + let res = exec('find foo').toString().trim().split('\n'); + for (let i = 0, ii = res.length; i < ii; i++) { + assert.equal(path.join.apply(path, expected[i]), res[i]); + } + file.rmRf('foo'); + }); + + test('rmRf', function () { + file.mkdirP('foo/bar/baz/qux'); + file.rmRf('foo/bar'); + let res = exec('find foo').toString().trim().split('\n'); + assert.equal(1, res.length); + assert.equal('foo', res[0]); + fs.rmdirSync('foo'); + }); + + test('rmRf with symlink subdir', function () { + file.mkdirP('foo'); + file.mkdirP('bar'); + fs.writeFileSync('foo/hello.txt', 'hello, it\'s me'); + fs.symlinkSync('../foo', 'bar/foo'); file.rmRf('bar'); + + // Make sure the bar directory was successfully deleted + let barDeleted = false; + try { + fs.statSync('bar'); + } catch(err) { + if(err.code == 'ENOENT') { + barDeleted = true; + } + } + assert.equal(true, barDeleted); + + // Make sure that the file inside the linked folder wasn't deleted + let res = fs.readdirSync('foo'); + assert.equal(1, res.length); + assert.equal('hello.txt', res[0]); + + // Cleanup + fs.unlinkSync('foo/hello.txt'); + fs.rmdirSync('foo'); + }); + + test('rmRf with symlinked dir', function () { + file.mkdirP('foo'); + fs.writeFileSync('foo/hello.txt', 'hello!'); + fs.symlinkSync('foo', 'bar'); + file.rmRf('bar'); + + // Make sure the bar directory was successfully deleted + let barDeleted = false; + try { + fs.statSync('bar'); + } catch(err) { + if(err.code == 'ENOENT') { + barDeleted = true; + } + } + assert.equal(true, barDeleted); + + // Make sure that the file inside the linked folder wasn't deleted + 
let res = fs.readdirSync('foo'); + assert.equal(1, res.length); + assert.equal('hello.txt', res[0]); + + // Cleanup + fs.unlinkSync('foo/hello.txt'); + fs.rmdirSync('foo'); + }); + + test('cpR with same name and different directory', function () { + file.mkdirP('foo'); + fs.writeFileSync('foo/bar.txt', 'w00t'); + file.cpR('foo', 'bar'); + assert.ok(existsSync('bar/bar.txt')); + file.rmRf('foo'); + file.rmRf('bar'); + }); + + test('cpR with same to and from will throw', function () { + assert.throws(function () { + file.cpR('foo.txt', 'foo.txt'); + }); + }); + + test('cpR rename via copy in directory', function () { + file.mkdirP('foo'); + fs.writeFileSync('foo/bar.txt', 'w00t'); + file.cpR('foo/bar.txt', 'foo/baz.txt'); + assert.ok(existsSync('foo/baz.txt')); + file.rmRf('foo'); + }); + + test('cpR rename via copy in base', function () { + fs.writeFileSync('bar.txt', 'w00t'); + file.cpR('bar.txt', 'baz.txt'); + assert.ok(existsSync('baz.txt')); + file.rmRf('bar.txt'); + file.rmRf('baz.txt'); + }); + + test('cpR keeps file mode', function () { + fs.writeFileSync('bar.txt', 'w00t', {mode: 0o750}); + fs.writeFileSync('bar1.txt', 'w00t!', {mode: 0o744}); + file.cpR('bar.txt', 'baz.txt'); + file.cpR('bar1.txt', 'baz1.txt'); + + assert.ok(existsSync('baz.txt')); + assert.ok(existsSync('baz1.txt')); + let bazStat = fs.statSync('baz.txt'); + let bazStat1 = fs.statSync('baz1.txt'); + assert.equal(0o750, bazStat.mode & 0o7777); + assert.equal(0o744, bazStat1.mode & 0o7777); + + file.rmRf('bar.txt'); + file.rmRf('baz.txt'); + file.rmRf('bar1.txt'); + file.rmRf('baz1.txt'); + }); + + test('cpR keeps file mode when overwriting with preserveMode', function () { + fs.writeFileSync('bar.txt', 'w00t', {mode: 0o755}); + fs.writeFileSync('baz.txt', 'w00t!', {mode: 0o744}); + file.cpR('bar.txt', 'baz.txt', {silent: true, preserveMode: true}); + + assert.ok(existsSync('baz.txt')); + let bazStat = fs.statSync('baz.txt'); + assert.equal(0o755, bazStat.mode & 0o777); + + file.rmRf('bar.txt'); + file.rmRf('baz.txt'); + }); + + test('cpR does not keep file mode when overwriting', function () { + fs.writeFileSync('bar.txt', 'w00t', {mode: 0o766}); + fs.writeFileSync('baz.txt', 'w00t!', {mode: 0o744}); + file.cpR('bar.txt', 'baz.txt'); + + assert.ok(existsSync('baz.txt')); + let bazStat = fs.statSync('baz.txt'); + assert.equal(0o744, bazStat.mode & 0o777); + + file.rmRf('bar.txt'); + file.rmRf('baz.txt'); + }); + + test('cpR copies file mode recursively', function () { + fs.mkdirSync('foo'); + fs.writeFileSync('foo/bar.txt', 'w00t', {mode: 0o740}); + file.cpR('foo', 'baz'); + + assert.ok(existsSync('baz')); + let barStat = fs.statSync('baz/bar.txt'); + assert.equal(0o740, barStat.mode & 0o777); + + file.rmRf('foo'); + file.rmRf('baz'); + }); + + test('cpR keeps file mode recursively', function () { + fs.mkdirSync('foo'); + fs.writeFileSync('foo/bar.txt', 'w00t', {mode: 0o740}); + fs.mkdirSync('baz'); + fs.mkdirSync('baz/foo'); + fs.writeFileSync('baz/foo/bar.txt', 'w00t!', {mode: 0o755}); + file.cpR('foo', 'baz', {silent: true, preserveMode: true}); + + assert.ok(existsSync('baz')); + let barStat = fs.statSync('baz/foo/bar.txt'); + assert.equal(0o740, barStat.mode & 0o777); + + file.rmRf('foo'); + file.rmRf('baz'); + }); + + test('cpR copies directory mode recursively', function () { + fs.mkdirSync('foo', 0o755); + fs.mkdirSync('foo/bar', 0o700); + file.cpR('foo', 'bar'); + + assert.ok(existsSync('foo')); + let fooBarStat = fs.statSync('bar/bar'); + assert.equal(0o700, fooBarStat.mode & 0o777); + + file.rmRf('foo'); 
+ file.rmRf('bar'); + }); + +}); + + diff --git a/node_modules/jake/test/integration/file_task.js b/node_modules/jake/test/integration/file_task.js new file mode 100644 index 0000000..b48f07e --- /dev/null +++ b/node_modules/jake/test/integration/file_task.js @@ -0,0 +1,125 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let assert = require('assert'); +let fs = require('fs'); +let exec = require('child_process').execSync; +let { rmRf } = require(`${PROJECT_DIR}/lib/jake`); + +let cleanUpAndNext = function (callback) { + rmRf('./foo', { + silent: true + }); + callback && callback(); +}; + +suite('fileTask', function () { + this.timeout(7000); + + setup(function () { + cleanUpAndNext(); + }); + + test('where a file-task prereq does not change with --always-make', function () { + let out; + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-src1.txt').toString().trim(); + assert.equal('fileTest:foo/src1.txt task\nfileTest:foo/from-src1.txt task', + out); + out = exec('./node_modules/.bin/jake -q -B fileTest:foo/from-src1.txt').toString().trim(); + assert.equal('fileTest:foo/src1.txt task\nfileTest:foo/from-src1.txt task', + out); + cleanUpAndNext(); + }); + + test('concating two files', function () { + let out; + out = exec('./node_modules/.bin/jake -q fileTest:foo/concat.txt').toString().trim(); + assert.equal('fileTest:foo/src1.txt task\ndefault task\nfileTest:foo/src2.txt task\n' + + 'fileTest:foo/concat.txt task', out); + // Check to see the two files got concat'd + let data = fs.readFileSync(process.cwd() + '/foo/concat.txt'); + assert.equal('src1src2', data.toString()); + cleanUpAndNext(); + }); + + test('where a file-task prereq does not change', function () { + let out; + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-src1.txt').toString().trim(); + assert.equal('fileTest:foo/src1.txt task\nfileTest:foo/from-src1.txt task', out); + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-src1.txt').toString().trim(); + // Second time should be a no-op + assert.equal('', out); + cleanUpAndNext(); + }); + + test('where a file-task prereq does change, then does not', function (next) { + exec('mkdir -p ./foo'); + exec('touch ./foo/from-src1.txt'); + setTimeout(() => { + fs.writeFileSync('./foo/src1.txt', '-SRC'); + // Task should run the first time + let out; + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-src1.txt').toString().trim(); + assert.equal('fileTest:foo/from-src1.txt task', out); + // Task should not run on subsequent invocation + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-src1.txt').toString().trim(); + assert.equal('', out); + cleanUpAndNext(next); + }, 1000); + }); + + test('a preexisting file', function () { + let prereqData = 'howdy'; + exec('mkdir -p ./foo'); + fs.writeFileSync('foo/prereq.txt', prereqData); + let out; + out = exec('./node_modules/.bin/jake -q 
fileTest:foo/from-prereq.txt').toString().trim(); + assert.equal('fileTest:foo/from-prereq.txt task', out); + let data = fs.readFileSync(process.cwd() + '/foo/from-prereq.txt'); + assert.equal(prereqData, data.toString()); + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-prereq.txt').toString().trim(); + // Second time should be a no-op + assert.equal('', out); + cleanUpAndNext(); + }); + + test('a preexisting file with --always-make flag', function () { + let prereqData = 'howdy'; + exec('mkdir -p ./foo'); + fs.writeFileSync('foo/prereq.txt', prereqData); + let out; + out = exec('./node_modules/.bin/jake -q fileTest:foo/from-prereq.txt').toString().trim(); + assert.equal('fileTest:foo/from-prereq.txt task', out); + let data = fs.readFileSync(process.cwd() + '/foo/from-prereq.txt'); + assert.equal(prereqData, data.toString()); + out = exec('./node_modules/.bin/jake -q -B fileTest:foo/from-prereq.txt').toString().trim(); + assert.equal('fileTest:foo/from-prereq.txt task', out); + cleanUpAndNext(); + }); + + test('nested directory-task', function () { + exec('./node_modules/.bin/jake -q fileTest:foo/bar/baz/bamf.txt'); + let data = fs.readFileSync(process.cwd() + '/foo/bar/baz/bamf.txt'); + assert.equal('w00t', data); + cleanUpAndNext(); + }); + +}); + diff --git a/node_modules/jake/test/integration/helpers.js b/node_modules/jake/test/integration/helpers.js new file mode 100644 index 0000000..9caaa4e --- /dev/null +++ b/node_modules/jake/test/integration/helpers.js @@ -0,0 +1,80 @@ +var exec = require('child_process').exec; + +var helpers = new (function () { + var _tests; + var _names = []; + var _name; + var _callback; + var _runner = function () { + if ((_name = _names.shift())) { + console.log('Running ' + _name); + _tests[_name](); + } + else { + _callback(); + } + }; + + this.exec = function () { + var args = Array.prototype.slice.call(arguments); + var arg; + var cmd = args.shift(); + var opts = {}; + var callback; + // Optional opts/callback or callback/opts + while ((arg = args.shift())) { + if (typeof arg == 'function') { + callback = arg; + } + else { + opts = arg; + } + } + + cmd += ' --trace'; + var execOpts = opts.execOpts ? 
opts.execOpts : {}; + exec(cmd, execOpts, function (err, stdout, stderr) { + var out = helpers.trim(stdout); + if (err) { + if (opts.breakOnError === false) { + return callback(err); + } + else { + throw err; + } + } + if (stderr) { + callback(stderr); + } + else { + callback(out); + } + }); + }; + + this.trim = function (s) { + var str = s || ''; + return str.replace(/^\s*|\s*$/g, ''); + }; + + this.parse = function (s) { + var str = s || ''; + str = helpers.trim(str); + str = str.replace(/'/g, '"'); + return JSON.parse(str); + }; + + this.run = function (tests, callback) { + _tests = tests; + _names = Object.keys(tests); + _callback = callback; + _runner(); + }; + + this.next = function () { + _runner(); + }; + +})(); + +module.exports = helpers; diff --git a/node_modules/jake/test/integration/jakefile.js b/node_modules/jake/test/integration/jakefile.js new file mode 100644 index 0000000..f3b7d1a --- /dev/null +++ b/node_modules/jake/test/integration/jakefile.js @@ -0,0 +1,337 @@ +let fs = require('fs'); +let Q = require('q'); + +desc('The default t.'); +task('default', function () { + console.log('default task'); +}); + +desc('No action.'); +task({'noAction': ['default']}); + +desc('No action, no prereqs.'); +task('noActionNoPrereqs'); + +desc('Top-level zerbofrangazoomy task'); +task('zerbofrangazoomy', function () { + console.log('Whaaaaaaaa? Ran the zerbofrangazoomy task!') +}); + +desc('Task that throws'); +task('throwy', function () { + let errorListener = function (err) { + console.log('Emitted'); + console.log(err.toString()); + + jake.removeListener('error', errorListener); + }; + + jake.on('error', errorListener); + + throw new Error('I am bad'); +}); + +desc('Task that rejects a Promise'); +task('promiseRejecter', function () { + const originalOption = jake.program.opts['allow-rejection']; + + const errorListener = function (err) { + console.log(err.toString()); + jake.removeListener('error', errorListener); + jake.program.opts['allow-rejection'] = originalOption; // Restore original 'allow-rejection' option + }; + jake.on('error', errorListener); + + jake.program.opts['allow-rejection'] = false; // Do not allow rejection so the rejection is passed to error handlers + + Promise.reject('<promise rejected on purpose>'); +}); + +desc('Accepts args and env vars.'); +task('argsEnvVars', function () { + let res = { + args: arguments + , env: { + foo: process.env.foo + , baz: process.env.baz + } + }; + console.log(JSON.stringify(res)); +}); + +namespace('foo', function () { + desc('The foo:bar t.'); + task('bar', function () { + if (arguments.length) { + console.log('foo:bar[' + + Array.prototype.join.call(arguments, ',') + + '] task'); + } + else { + console.log('foo:bar task'); + } + }); + + desc('The foo:baz task, calls foo:bar as a prerequisite.'); + task('baz', ['foo:bar'], function () { + console.log('foo:baz task'); + }); + + desc('The foo:qux task, calls foo:bar with cmdline args as a prerequisite.'); + task('qux', ['foo:bar[asdf,qwer]'], function () { + console.log('foo:qux task'); + }); + + desc('The foo:frang task,`invokes` foo:bar with passed args as a prerequisite.'); + task('frang', function () { + let t = jake.Task['foo:bar']; + // Do args pass-through + t.invoke.apply(t, arguments); + t.on('complete', () => { + console.log('foo:frang task'); + }); + }); + + desc('The foo:zerb task, `executes` foo:bar with passed args as a prerequisite.'); + task('zerb', function () { + let t = jake.Task['foo:bar']; + // Do args pass-through + t.execute.apply(t, arguments); + 
t.on('complete', () => { + console.log('foo:zerb task'); + }); + }); + + desc('The foo:zoobie task, has no prerequisites.'); + task('zoobie', function () { + console.log('foo:zoobie task'); + }); + + desc('The foo:voom task, run the foo:zoobie task repeatedly.'); + task('voom', function () { + let t = jake.Task['foo:bar']; + t.on('complete', function () { + console.log('complete'); + }); + t.execute.apply(t); + t.execute.apply(t); + }); + + desc('The foo:asdf task, has the same prereq twice.'); + task('asdf', ['foo:bar', 'foo:baz'], function () { + console.log('foo:asdf task'); + }); + +}); + +namespace('bar', function () { + desc('The bar:foo task, has no prerequisites, is async, returns Promise which resolves.'); + task('foo', async function () { + return new Promise((resolve, reject) => { + console.log('bar:foo task'); + resolve(); + }); + }); + + desc('The bar:promise task has no prerequisites, is async, returns Q-based promise.'); + task('promise', function () { + return Q() + .then(function () { + console.log('bar:promise task'); + return 123654; + }); + }); + + desc('The bar:dependOnpromise task waits for a promise based async test'); + task('dependOnpromise', ['promise'], function () { + console.log('bar:dependOnpromise task saw value', jake.Task["bar:promise"].value); + }); + + desc('The bar:brokenPromise task is a failing Q-promise based async task.'); + task('brokenPromise', function () { + return Q() + .then(function () { + throw new Error("nom nom nom"); + }); + }); + + desc('The bar:bar task, has the async bar:foo task as a prerequisite.'); + task('bar', ['bar:foo'], function () { + console.log('bar:bar task'); + }); + +}); + +namespace('hoge', function () { + desc('The hoge:hoge task, has no prerequisites.'); + task('hoge', function () { + console.log('hoge:hoge task'); + }); + + desc('The hoge:piyo task, has no prerequisites.'); + task('piyo', function () { + console.log('hoge:piyo task'); + }); + + desc('The hoge:fuga task, has hoge:hoge and hoge:piyo as prerequisites.'); + task('fuga', ['hoge:hoge', 'hoge:piyo'], function () { + console.log('hoge:fuga task'); + }); + + desc('The hoge:charan task, has hoge:fuga as a prerequisite.'); + task('charan', ['hoge:fuga'], function () { + console.log('hoge:charan task'); + }); + + desc('The hoge:gero task, has hoge:fuga as a prerequisite.'); + task('gero', ['hoge:fuga'], function () { + console.log('hoge:gero task'); + }); + + desc('The hoge:kira task, has hoge:charan and hoge:gero as prerequisites.'); + task('kira', ['hoge:charan', 'hoge:gero'], function () { + console.log('hoge:kira task'); + }); + +}); + +namespace('fileTest', function () { + directory('foo'); + + desc('File task, concatenating two files together'); + file('foo/concat.txt', ['fileTest:foo', 'fileTest:foo/src1.txt', 'fileTest:foo/src2.txt'], function () { + console.log('fileTest:foo/concat.txt task'); + let data1 = fs.readFileSync('foo/src1.txt'); + let data2 = fs.readFileSync('foo/src2.txt'); + fs.writeFileSync('foo/concat.txt', data1 + data2); + }); + + desc('File task, async creation with writeFile'); + file('foo/src1.txt', function () { + return new Promise(function (resolve, reject) { + fs.writeFile('foo/src1.txt', 'src1', function (err) { + if (err) { + reject(err); + } + else { + console.log('fileTest:foo/src1.txt task'); + resolve(); + } + }); + }); + }); + + desc('File task, sync creation with writeFileSync'); + file('foo/src2.txt', ['default'], function () { + fs.writeFileSync('foo/src2.txt', 'src2'); + console.log('fileTest:foo/src2.txt task'); + }); + 
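  // Note: `file` tasks like the ones below only re-run when the target file
  // is missing or a prerequisite has a newer mtime; a second invocation is a
  // no-op unless the prereq is touched or -B/--always-make is passed (see the
  // file_task integration tests). A minimal sketch, using the hypothetical
  // names 'out.txt'/'in.txt' for illustration only:
  //
  //   file('out.txt', ['in.txt'], function () {
  //     fs.writeFileSync('out.txt', fs.readFileSync('in.txt'));
  //   });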
+ desc('File task, do not run unless the prereq file changes'); + file('foo/from-src1.txt', ['fileTest:foo', 'fileTest:foo/src1.txt'], function () { + let data = fs.readFileSync('foo/src1.txt').toString(); + fs.writeFileSync('foo/from-src1.txt', data); + console.log('fileTest:foo/from-src1.txt task'); + }); + + desc('File task, run if the prereq file changes'); + task('touch-prereq', function () { + fs.writeFileSync('foo/prereq.txt', 'UPDATED'); + }) + + desc('File task, has a preexisting file (with no associated task) as a prereq'); + file('foo/from-prereq.txt', ['fileTest:foo', 'foo/prereq.txt'], function () { + let data = fs.readFileSync('foo/prereq.txt'); + fs.writeFileSync('foo/from-prereq.txt', data); + console.log('fileTest:foo/from-prereq.txt task'); + }); + + directory('foo/bar/baz'); + + desc('Write a file in a nested subdirectory'); + file('foo/bar/baz/bamf.txt', ['foo/bar/baz'], function () { + fs.writeFileSync('foo/bar/baz/bamf.txt', 'w00t'); + }); + +}); + +task('blammo'); +// Define task +task('voom', ['blammo'], function () { + console.log(this.prereqs.length); +}); + +// Modify, add a prereq +task('voom', ['noActionNoPrereqs']); + +namespace('vronk', function () { + task('groo', function () { + let t = jake.Task['vronk:zong']; + t.addListener('error', function (e) { + console.log(e.message); + }); + t.invoke(); + }); + task('zong', function () { + throw new Error('OMFGZONG'); + }); +}); + +// define namespace +namespace('one', function () { + task('one', function () { + console.log('one:one'); + }); +}); + +// modify namespace (add task) +namespace('one', function () { + task('two', ['one:one'], function () { + console.log('one:two'); + }); +}); + +task('selfdepconst', [], function () { + task('selfdep', ['selfdep'], function () { + console.log("I made a task that depends on itself"); + }); +}); +task('selfdepdyn', function () { + task('selfdeppar', [], {concurrency: 2}, function () { + console.log("I will depend on myself and will fail at runtime"); + }); + task('selfdeppar', ['selfdeppar']); + jake.Task['selfdeppar'].invoke(); +}); + +namespace("large", function () { + task("leaf", function () { + console.log("large:leaf"); + }); + + const same = []; + for (let i = 0; i < 2000; i++) { + same.push("leaf"); + } + + desc("Task with a large number of same prereqs"); + task("same", same, { concurrency: 2 }, function () { + console.log("large:same"); + }); + + const different = []; + for (let i = 0; i < 2000; i++) { + const name = "leaf-" + i; + task(name, function () { + if (name === "leaf-12" || name === "leaf-123") { + console.log(name); + } + }); + different.push(name); + } + + desc("Task with a large number of different prereqs"); + task("different", different, { concurrency: 2 } , function () { + console.log("large:different") + }) +}); diff --git a/node_modules/jake/test/integration/jakelib/concurrent.jake.js b/node_modules/jake/test/integration/jakelib/concurrent.jake.js new file mode 100644 index 0000000..684c86f --- /dev/null +++ b/node_modules/jake/test/integration/jakelib/concurrent.jake.js @@ -0,0 +1,113 @@ + +namespace('concurrent', function () { + task('A', function () { + console.log('Started A'); + return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Finished A'); + resolve(); + }, 200); + }); + }); + + task('B', function () { + console.log('Started B'); + return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Finished B'); + resolve(); + }, 50); + }); + }); + + task('C', function () { + console.log('Started C'); 
+ return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Finished C'); + resolve(); + }, 100); + }); + }); + + task('D', function () { + console.log('Started D'); + return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Finished D'); + resolve(); + }, 300); + }); + }); + + task('Ba', ['A'], function () { + console.log('Started Ba'); + return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Finished Ba'); + resolve(); + }, 50); + }); + }); + + task('Afail', function () { + console.log('Started failing task'); + return new Promise((resolve, reject) => { + setTimeout(() => { + console.log('Failing B with error'); + throw new Error('I failed'); + }, 50); + }); + }); + + task('simple1', ['A','B'], {concurrency: 2}, function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + + task('simple2', ['C','D'], {concurrency: 2}, function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + + task('seqconcurrent', ['simple1','simple2'], function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + + task('concurrentconcurrent', ['simple1','simple2'], {concurrency: 2}, function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + + task('subdep', ['A','Ba'], {concurrency: 2}, function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + + task('fail', ['A', 'B', 'Afail'], {concurrency: 3}, function () { + return new Promise((resolve, reject) => { + setTimeout(() => { + resolve(); + }, 50); + }); + }); + +}); + + diff --git a/node_modules/jake/test/integration/jakelib/publish.jake.js b/node_modules/jake/test/integration/jakelib/publish.jake.js new file mode 100644 index 0000000..52dd04a --- /dev/null +++ b/node_modules/jake/test/integration/jakelib/publish.jake.js @@ -0,0 +1,49 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let fs = require('fs'); +let { publishTask, rmRf, mkdirP } = require(`${PROJECT_DIR}/lib/jake`); + +fs.writeFileSync('package.json', '{"version": "0.0.1"}'); +mkdirP('tmp_publish'); +fs.writeFileSync('tmp_publish/foo.txt', 'FOO'); + +publishTask('zerb', function () { + this.packageFiles.include([ + 'package.json' + , 'tmp_publish/**' + ]); + this.publishCmd = 'node -p -e "\'%filename\'"'; + this.gitCmd = 'echo' + this.scheduleDelay = 0; + + this._ensureRepoClean = function () {}; + this._getCurrentBranch = function () { + return 'v0.0' + }; +}); + +jake.setTaskTimeout(5000); + +jake.Task['publish'].on('complete', function () { + rmRf('tmp_publish', {silent: true}); + rmRf('package.json', {silent: true}); +}); + diff --git a/node_modules/jake/test/integration/jakelib/required_module.jake.js b/node_modules/jake/test/integration/jakelib/required_module.jake.js new file mode 100644 index 0000000..c63751d --- /dev/null +++ b/node_modules/jake/test/integration/jakelib/required_module.jake.js @@ -0,0 +1,10 @@ +let { task, namespace } = require("jake"); + +namespace('usingRequire', function () { + task('test', () => { + console.log('howdy test'); + }); +}); + + + diff --git a/node_modules/jake/test/integration/jakelib/rule.jake.js b/node_modules/jake/test/integration/jakelib/rule.jake.js new file mode 100644 index 0000000..8e977dd --- /dev/null +++ b/node_modules/jake/test/integration/jakelib/rule.jake.js @@ -0,0 +1,222 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let exec = require('child_process').execSync; +let fs = require('fs'); +let util = require('util'); +let { rule, rmRf } = require(`${PROJECT_DIR}/lib/jake`); + +directory('tmpsrc'); +directory('tmpbin'); + +//////////////////////////////////////////////////////////// +// Simple Suffix Rule +file('tmp', ['tmp_init', 'tmp_dep1.o', 'tmp_dep2.o'], function (params) { + console.log('tmp task'); + let data1 = fs.readFileSync('tmp_dep1.o'); + let data2 = fs.readFileSync('tmp_dep2.o'); + fs.writeFileSync('tmp', data1 + data2); +}); + +rule('.o', '.c', function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' task'); + exec(cmd); +}); + +file('tmp_dep1.c', function () { + fs.writeFileSync('tmp_dep1.c', 'src_1'); + console.log('tmp_dep1.c task'); +}); + +// note that tmp_dep2.o depends on tmp_dep2.c, which is a +// static file. 
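// Note: 'tmp_dep1.o' has no task of its own, so jake resolves it through the
// suffix rule('.o', '.c', ...) above, which synthesizes a file task whose
// prerequisite is the matching '.c' file; roughly equivalent to this sketch:
//
//   file('tmp_dep1.o', ['tmp_dep1.c'], function () {
//     exec('cp tmp_dep1.c tmp_dep1.o');
//   });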
+task('tmp_init', function () { + fs.writeFileSync('tmp_dep2.c', 'src_2'); + console.log('tmp_dep2.c task'); +}); +//////////////////////////////////////////////////////////// + +//////////////////////////////////////////////////////////// +// Pattern Rule +file('tmp_p', ['tmp_init', 'tmp_dep1.oo', 'tmp_dep2.oo'], function (params) { + console.log('tmp pattern task'); + let data1 = fs.readFileSync('tmp_dep1.oo'); + let data2 = fs.readFileSync('tmp_dep2.oo'); + fs.writeFileSync('tmp_p', data1 + data2 + ' pattern'); +}); + +rule('%.oo', '%.c', function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' task'); + exec(cmd); +}); +//////////////////////////////////////////////////////////// + +//////////////////////////////////////////////////////////// +// Pattern Rule with Folder +// i.e. rule('tmpbin/%.oo', 'tmpsrc/%.c', ... +file('tmp_pf', [ + 'tmp_src_init' + , 'tmpbin' + , 'tmpbin/tmp_dep1.oo' + , 'tmpbin/tmp_dep2.oo' ], function (params) { + console.log('tmp pattern folder task'); + let data1 = fs.readFileSync('tmpbin/tmp_dep1.oo'); + let data2 = fs.readFileSync('tmpbin/tmp_dep2.oo'); + fs.writeFileSync('tmp_pf', data1 + data2 + ' pattern folder'); +}); + +rule('tmpbin/%.oo', 'tmpsrc/%.c', function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' task'); + exec(cmd); +}); + +file('tmpsrc/tmp_dep2.c',['tmpsrc'], function () { + fs.writeFileSync('tmpsrc/tmp_dep2.c', 'src/src_2'); + console.log('tmpsrc/tmp_dep2.c task'); +}); + +// Create static files in folder tmpsrc. +task('tmp_src_init', ['tmpsrc'], function () { + fs.writeFileSync('tmpsrc/tmp_dep1.c', 'src/src_1'); + console.log('tmpsrc/tmp_dep1.c task'); +}); +//////////////////////////////////////////////////////////// + + +//////////////////////////////////////////////////////////// +// Namespace Test. This is a Mixed Test. +// Test for +// - rules belonging to different namespace. +// - rules with folder and pattern +task('tmp_ns', [ + 'tmpbin' + , 'rule:init' + , 'tmpbin/tmp_dep2.oo' // *** This relies on a rule defined before. + , 'rule:tmpbin/dep1.oo' + , 'rule:tmpbin/file2.oo' ], function () { + console.log('tmp pattern folder namespace task'); + let data1 = fs.readFileSync('tmpbin/dep1.oo'); + let data2 = fs.readFileSync('tmpbin/tmp_dep2.oo'); + let data3 = fs.readFileSync('tmpbin/file2.oo'); + fs.writeFileSync('tmp_ns', data1 + data2 + data3 + ' pattern folder namespace'); +}); + +namespace('rule', function () { + task('init', ['tmpsrc'], function () { + fs.writeFileSync('tmpsrc/file2.c', 'src/src_3'); + console.log('tmpsrc/file2.c init task'); + }); + + file('tmpsrc/dep1.c',['tmpsrc'], function () { + fs.writeFileSync('tmpsrc/dep1.c', 'src/src_1'); + console.log('tmpsrc/dep1.c task'); + }, {async: true}); + + rule('tmpbin/%.oo', 'tmpsrc/%.c', function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' ns task'); + exec(cmd); + }); +}); +//////////////////////////////////////////////////////////// + +//////////////////////////////////////////////////////////// +// Chain rule +// rule('tmpbin/%.pdf', 'tmpbin/%.dvi', function() { ... +// rule('tmpbin/%.dvi', 'tmpsrc/%.tex', ['tmpbin'], function() { ... 
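// Note: rules can chain. Asking for the missing 'tmpbin/file1.pdf' matches
// the pdf rule, whose source 'tmpbin/file1.dvi' is itself missing and matches
// the dvi rule, so a single prerequisite ends up running the tex->dvi step
// and then the dvi->pdf step; the tmp_cr task below exercises exactly that.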
+task('tmp_cr', [ + 'chainrule:init' + , 'chainrule:tmpbin/file1.pdf' + , 'chainrule:tmpbin/file2.pdf' ], function () { + console.log('tmp chainrule namespace task'); + let data1 = fs.readFileSync('tmpbin/file1.pdf'); + let data2 = fs.readFileSync('tmpbin/file2.pdf'); + fs.writeFileSync('tmp_cr', data1 + data2 + ' chainrule namespace'); +}); + +namespace('chainrule', function () { + task('init', ['tmpsrc', 'tmpbin'], function () { + fs.writeFileSync('tmpsrc/file1.tex', 'tex1 '); + fs.writeFileSync('tmpsrc/file2.tex', 'tex2 '); + console.log('chainrule init task'); + }); + + rule('tmpbin/%.pdf', 'tmpbin/%.dvi', function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' dvi->pdf task'); + exec(cmd); + }); + + rule('tmpbin/%.dvi', 'tmpsrc/%.tex', ['tmpbin'], function () { + let cmd = util.format('cp %s %s', this.source, this.name); + console.log(cmd + ' tex->dvi task'); + exec(cmd); + }); +}); +//////////////////////////////////////////////////////////// +namespace('precedence', function () { + task('test', ['foo.html'], function () { + console.log('ran test'); + }); + + rule('.html', '.txt', function () { + console.log('created html'); + let data = fs.readFileSync(this.source); + fs.writeFileSync(this.name, data.toString()); + }); +}); + +namespace('regexPattern', function () { + task('test', ['foo.html'], function () { + console.log('ran test'); + }); + + rule(/\.html$/, '.txt', function () { + console.log('created html'); + let data = fs.readFileSync(this.source); + fs.writeFileSync(this.name, data.toString()); + }); +}); + +namespace('sourceFunction', function () { + + let srcFunc = function (taskName) { + return taskName.replace(/\.[^.]+$/, '.txt'); + }; + + task('test', ['foo.html'], function () { + console.log('ran test'); + }); + + rule('.html', srcFunc, function () { + console.log('created html'); + let data = fs.readFileSync(this.source); + fs.writeFileSync(this.name, data.toString()); + }); +}); + +//////////////////////////////////////////////////////////// +task('clean', function () { + rmRf('./foo'); + rmRf('./tmp'); +}); diff --git a/node_modules/jake/test/integration/publish_task.js b/node_modules/jake/test/integration/publish_task.js new file mode 100644 index 0000000..034fd94 --- /dev/null +++ b/node_modules/jake/test/integration/publish_task.js @@ -0,0 +1,24 @@ +let assert = require('assert'); +let exec = require('child_process').execSync; + +suite('publishTask', function () { + + this.timeout(7000); + + test('default task', function () { + let out = exec('./node_modules/.bin/jake -q publish').toString().trim(); + let expected = [ + 'Fetched remote tags.' + , 'On branch v0.0' + , 'Bumped version number to v0.0.2.' + , 'Created package for zerb v0.0.2' + , 'Publishing zerb v0.0.2' + , './pkg/zerb-v0.0.2.tar.gz' + , 'BOOM! Published.' + , 'Cleaned up package' + ].join('\n'); + assert.equal(expected, out); + }); + +}); + diff --git a/node_modules/jake/test/integration/rule.js b/node_modules/jake/test/integration/rule.js new file mode 100644 index 0000000..b837b1d --- /dev/null +++ b/node_modules/jake/test/integration/rule.js @@ -0,0 +1,216 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let assert = require('assert'); +let exec = require('child_process').execSync; +let fs = require('fs'); +let { Rule } = require(`${PROJECT_DIR}/lib/rule`); +let { rmRf } = require(`${PROJECT_DIR}/lib/jake`); + +let cleanUpAndNext = function (callback) { + // Gotta add globbing to file utils rmRf + let tmpFiles = [ + 'tmp' + , 'tmp_ns' + , 'tmp_cr' + , 'tmp_p' + , 'tmp_pf' + , 'tmpbin' + , 'tmpsrc' + , 'tmp_dep1.c' + , 'tmp_dep1.o' + , 'tmp_dep1.oo' + , 'tmp_dep2.c' + , 'tmp_dep2.o' + , 'tmp_dep2.oo' + , 'foo' + , 'foo.html' + ]; + tmpFiles.forEach(function (f) { + rmRf(f, { + silent: true + }); + }); + callback && callback(); +}; + +suite('rule', function () { + + this.timeout(7000); + + setup(function (next) { + cleanUpAndNext(next); + }); + + + // - name foo:bin/main.o + // - pattern bin/%.o + // - source src/%.c + // + // return { + // 'dep' : 'foo:src/main.c', + // 'file': 'src/main.c' + // }; + test('Rule.getSource', function () { + let src = Rule.getSource('foo:bin/main.o', 'bin/%.o', 'src/%.c'); + assert.equal('foo:src/main.c', src); + }); + + test('rule w/o pattern', function () { + let out = exec( './node_modules/.bin/jake -q tmp').toString().trim(); + let output = [ + "tmp_dep2.c task" + , "tmp_dep1.c task" + , "cp tmp_dep1.c tmp_dep1.o task" + , "cp tmp_dep2.c tmp_dep2.o task" + , "tmp task"]; + assert.equal( output.join('\n'), out); + let data = fs.readFileSync(process.cwd() + '/tmp'); + assert.equal('src_1src_2', data.toString()); + cleanUpAndNext(); + }); + + test('rule w pattern w/o folder w/o namespace', function () { + let out = exec( './node_modules/.bin/jake -q tmp_p').toString().trim(); + let output = [ + "tmp_dep2.c task" + , "tmp_dep1.c task" + , "cp tmp_dep1.c tmp_dep1.oo task" + , "cp tmp_dep2.c tmp_dep2.oo task" + , "tmp pattern task"]; + let data; + assert.equal( output.join('\n'), out); + data = fs.readFileSync(process.cwd() + '/tmp_p'); + assert.equal('src_1src_2 pattern', data.toString()); + cleanUpAndNext(); + }); + + test('rule w pattern w folder w/o namespace', function () { + let out = exec( './node_modules/.bin/jake -q tmp_pf').toString().trim(); + let output = [ + "tmpsrc/tmp_dep1.c task" + , "cp tmpsrc/tmp_dep1.c tmpbin/tmp_dep1.oo task" + , "tmpsrc/tmp_dep2.c task" + , "cp tmpsrc/tmp_dep2.c tmpbin/tmp_dep2.oo task" + , "tmp pattern folder task"]; + let data; + assert.equal( output.join('\n'), out); + data = fs.readFileSync(process.cwd() + '/tmp_pf'); + assert.equal('src/src_1src/src_2 pattern folder', data.toString()); + cleanUpAndNext(); + }); + + test.skip('rule w pattern w folder w namespace', function () { + let out = exec( './node_modules/.bin/jake -q tmp_ns').toString().trim(); + let output = [ + "tmpsrc/file2.c init task" // yes + , "tmpsrc/tmp_dep2.c task" // no + , "cp tmpsrc/tmp_dep2.c tmpbin/tmp_dep2.oo task" // no + , "tmpsrc/dep1.c task" // no + , "cp tmpsrc/dep1.c tmpbin/dep1.oo ns task" // no + , "cp tmpsrc/file2.c tmpbin/file2.oo ns task" // yes + , "tmp pattern folder namespace task"]; // yes + let data; + assert.equal( output.join('\n'), out); + data = 
fs.readFileSync(process.cwd() + '/tmp_ns'); + assert.equal('src/src_1src/src_2src/src_3 pattern folder namespace', data.toString()); + cleanUpAndNext(); + }); + + test.skip('rule w chain w pattern w folder w namespace', function () { + let out = exec( './node_modules/.bin/jake -q tmp_cr').toString().trim(); + let output = [ + "chainrule init task" + , "cp tmpsrc/file1.tex tmpbin/file1.dvi tex->dvi task" + , "cp tmpbin/file1.dvi tmpbin/file1.pdf dvi->pdf task" + , "cp tmpsrc/file2.tex tmpbin/file2.dvi tex->dvi task" + , "cp tmpbin/file2.dvi tmpbin/file2.pdf dvi->pdf task" + , "tmp chainrule namespace task"]; + let data; + assert.equal( output.join('\n'), out); + data = fs.readFileSync(process.cwd() + '/tmp_cr'); + assert.equal('tex1 tex2 chainrule namespace', data.toString()); + cleanUpAndNext(); + }); + + + ['precedence', 'regexPattern', 'sourceFunction'].forEach(function (key) { + + test('rule with source file not created yet (' + key + ')', function () { + let write = process.stderr.write; + process.stderr.write = () => {}; + rmRf('foo.txt', {silent: true}); + rmRf('foo.html', {silent: true}); + try { + exec('./node_modules/.bin/jake ' + key + ':test'); + } + catch(err) { + // foo.txt prereq doesn't exist yet + assert.ok(err.message.indexOf('Unknown task "foo.html"') > -1); + } + process.stderr.write = write; + }); + + test('rule with source file now created (' + key + ')', function () { + fs.writeFileSync('foo.txt', ''); + let out = exec('./node_modules/.bin/jake -q ' + key + ':test').toString().trim(); + // Should run prereq and test task + let output = [ + 'created html' + , 'ran test' + ]; + assert.equal(output.join('\n'), out); + }); + + test('rule with source file modified (' + key + ')', function (next) { + setTimeout(function () { + fs.writeFileSync('foo.txt', ''); + let out = exec('./node_modules/.bin/jake -q ' + key + ':test').toString().trim(); + // Should again run both prereq and test task + let output = [ + 'created html' + , 'ran test' + ]; + assert.equal(output.join('\n'), out); + //next(); + cleanUpAndNext(next); + }, 1000); // Wait to do the touch to ensure mod-time is different + }); + + test('rule with existing objective file and no source ' + + ' (should be normal file-task) (' + key + ')', function () { + // Remove just the source file + fs.writeFileSync('foo.html', ''); + rmRf('foo.txt', {silent: true}); + let out = exec('./node_modules/.bin/jake -q ' + key + ':test').toString().trim(); + // Should treat existing objective file as plain file-task, + // and just run test-task + let output = [ + 'ran test' + ]; + assert.equal(output.join('\n'), out); + cleanUpAndNext(); + }); + + }); + +}); + + diff --git a/node_modules/jake/test/integration/selfdep.js b/node_modules/jake/test/integration/selfdep.js new file mode 100644 index 0000000..22d58d1 --- /dev/null +++ b/node_modules/jake/test/integration/selfdep.js @@ -0,0 +1,39 @@ +let assert = require('assert'); +let exec = require('child_process').execSync; + +suite('selfDep', function () { + + this.timeout(7000); + + let origStderrWrite; + + setup(function () { + origStderrWrite = process.stderr.write; + process.stderr.write = function () {}; + }); + + teardown(function () { + process.stderr.write = origStderrWrite; + }); + + test('self dep const', function () { + try { + exec('./node_modules/.bin/jake selfdepconst'); + } + catch(e) { + assert(e.message.indexOf('dependency of itself') > -1) + } + }); + + test('self dep dyn', function () { + try { + exec('./node_modules/.bin/jake selfdepdyn'); + } + catch(e) { + 
assert(e.message.indexOf('dependency of itself') > -1) + } + }); + +}); + + diff --git a/node_modules/jake/test/integration/task_base.js b/node_modules/jake/test/integration/task_base.js new file mode 100644 index 0000000..36e20e8 --- /dev/null +++ b/node_modules/jake/test/integration/task_base.js @@ -0,0 +1,164 @@ +let assert = require('assert'); +let h = require('./helpers'); +let exec = require('child_process').execSync; + +suite('taskBase', function () { + + this.timeout(7000); + + test('default task', function () { + let out; + out = exec('./node_modules/.bin/jake -q').toString().trim(); + assert.equal(out, 'default task'); + out = exec('./node_modules/.bin/jake -q default').toString().trim(); + assert.equal(out, 'default task'); + }); + + test('task with no action', function () { + let out = exec('./node_modules/.bin/jake -q noAction').toString().trim(); + assert.equal(out, 'default task'); + }); + + test('a task with no action and no prereqs', function () { + exec('./node_modules/.bin/jake noActionNoPrereqs'); + }); + + test('a task that exists at the top-level, and not in the specified namespace, should error', function () { + let res = require('child_process').spawnSync('./node_modules/.bin/jake', + ['asdfasdfasdf:zerbofrangazoomy']); + let err = res.stderr.toString(); + assert.ok(err.indexOf('Unknown task' > -1)); + }); + + test('passing args to a task', function () { + let out = exec('./node_modules/.bin/jake -q argsEnvVars[foo,bar]').toString().trim(); + let parsed = h.parse(out); + let args = parsed.args; + assert.equal(args[0], 'foo'); + assert.equal(args[1], 'bar'); + }); + + test('a task with environment vars', function () { + let out = exec('./node_modules/.bin/jake -q argsEnvVars foo=bar baz=qux').toString().trim(); + let parsed = h.parse(out); + let env = parsed.env; + assert.equal(env.foo, 'bar'); + assert.equal(env.baz, 'qux'); + }); + + test('passing args and using environment vars', function () { + let out = exec('./node_modules/.bin/jake -q argsEnvVars[foo,bar] foo=bar baz=qux').toString().trim(); + let parsed = h.parse(out); + let args = parsed.args; + let env = parsed.env; + assert.equal(args[0], 'foo'); + assert.equal(args[1], 'bar'); + assert.equal(env.foo, 'bar'); + assert.equal(env.baz, 'qux'); + }); + + test('a simple prereq', function () { + let out = exec('./node_modules/.bin/jake -q foo:baz').toString().trim(); + assert.equal(out, 'foo:bar task\nfoo:baz task'); + }); + + test('a duplicate prereq only runs once', function () { + let out = exec('./node_modules/.bin/jake -q foo:asdf').toString().trim(); + assert.equal(out, 'foo:bar task\nfoo:baz task\nfoo:asdf task'); + }); + + test('a prereq with command-line args', function () { + let out = exec('./node_modules/.bin/jake -q foo:qux').toString().trim(); + assert.equal(out, 'foo:bar[asdf,qwer] task\nfoo:qux task'); + }); + + test('a prereq with args via invoke', function () { + let out = exec('./node_modules/.bin/jake -q foo:frang[zxcv,uiop]').toString().trim(); + assert.equal(out, 'foo:bar[zxcv,uiop] task\nfoo:frang task'); + }); + + test('a prereq with args via execute', function () { + let out = exec('./node_modules/.bin/jake -q foo:zerb[zxcv,uiop]').toString().trim(); + assert.equal(out, 'foo:bar[zxcv,uiop] task\nfoo:zerb task'); + }); + + test('repeating the task via execute', function () { + let out = exec('./node_modules/.bin/jake -q foo:voom').toString().trim(); + assert.equal(out, 'foo:bar task\nfoo:bar task\ncomplete\ncomplete'); + }); + + test('prereq execution-order', function () { + let out = 
exec('./node_modules/.bin/jake -q hoge:fuga').toString().trim(); + assert.equal(out, 'hoge:hoge task\nhoge:piyo task\nhoge:fuga task'); + }); + + test('basic async task', function () { + let out = exec('./node_modules/.bin/jake -q bar:bar').toString().trim(); + assert.equal(out, 'bar:foo task\nbar:bar task'); + }); + + test('promise async task', function () { + let out = exec('./node_modules/.bin/jake -q bar:dependOnpromise').toString().trim(); + assert.equal(out, 'bar:promise task\nbar:dependOnpromise task saw value 123654'); + }); + + test('failing promise async task', function () { + try { + exec('./node_modules/.bin/jake -q bar:brokenPromise'); + } + catch(e) { + assert(e.message.indexOf('Command failed') > -1); + } + }); + + test('that current-prereq index gets reset', function () { + let out = exec('./node_modules/.bin/jake -q hoge:kira').toString().trim(); + assert.equal(out, 'hoge:hoge task\nhoge:piyo task\nhoge:fuga task\n' + + 'hoge:charan task\nhoge:gero task\nhoge:kira task'); + }); + + test('modifying a task by adding prereq during execution', function () { + let out = exec('./node_modules/.bin/jake -q voom').toString().trim(); + assert.equal(out, 2); + }); + + test('listening for task error-event', function () { + try { + exec('./node_modules/.bin/jake -q vronk:groo').toString().trim(); + } + catch(e) { + assert(e.message.indexOf('OMFGZONG') > -1); + } + }); + + test('listening for jake error-event', function () { + let out = exec('./node_modules/.bin/jake -q throwy').toString().trim(); + assert(out.indexOf('Emitted\nError: I am bad') > -1); + }); + + test('listening for jake unhandledRejection-event', function () { + let out = exec('./node_modules/.bin/jake -q promiseRejecter').toString().trim(); + assert.equal(out, '<promise rejected on purpose>'); + }); + + test('large number of same prereqs', function () { + let out = exec('./node_modules/.bin/jake -q large:same').toString().trim(); + assert.equal(out, 'large:leaf\nlarge:same'); + }); + + test('large number of different prereqs', function () { + let out = exec('./node_modules/.bin/jake -q large:different').toString().trim(); + assert.equal(out, 'leaf-12\nleaf-123\nlarge:different'); + }); + + test('large number of different prereqs', function () { + let out = exec('./node_modules/.bin/jake -q usingRequire:test').toString().trim(); + assert.equal(out, 'howdy test'); + }); + + test('modifying a namespace by adding a new task', function () { + let out = exec('./node_modules/.bin/jake -q one:two').toString().trim(); + assert.equal('one:one\none:two', out); + }); + +}); diff --git a/node_modules/jake/test/unit/jakefile.js b/node_modules/jake/test/unit/jakefile.js new file mode 100644 index 0000000..89ff523 --- /dev/null +++ b/node_modules/jake/test/unit/jakefile.js @@ -0,0 +1,36 @@ + +task('foo', function () { + console.log('ran top-level foo'); +}); + +task('bar', function () { + console.log('ran top-level bar'); +}); + +task('zerb', function () { + console.log('ran zerb'); +}); + +namespace('zooby', function () { + task('zerp', function () {}); + + task('derp', ['zerp'], function () {}); + + namespace('frang', function () { + + namespace('w00t', function () { + task('bar', function () { + console.log('ran zooby:frang:w00t:bar'); + }); + }); + + task('asdf', function () {}); + }); + +}); + +namespace('hurr', function () { + namespace('durr'); +}); + + diff --git a/node_modules/jake/test/unit/namespace.js b/node_modules/jake/test/unit/namespace.js new file mode 100644 index 0000000..c6b3ff5 --- /dev/null +++ 
b/node_modules/jake/test/unit/namespace.js @@ -0,0 +1,77 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +// Load the jake global +require(`${PROJECT_DIR}/lib/jake`); +let { Namespace } = require(`${PROJECT_DIR}/lib/namespace`); + +require('./jakefile'); + +let assert = require('assert'); + +suite('namespace', function () { + + this.timeout(7000); + + test('resolve namespace by relative name', function () { + let aaa, bbb, ccc; + aaa = namespace('aaa', function () { + bbb = namespace('bbb', function () { + ccc = namespace('ccc', function () { + }); + }); + }); + + assert.ok(aaa, Namespace.ROOT_NAMESPACE.resolveNamespace('aaa')); + assert.ok(bbb === aaa.resolveNamespace('bbb')); + assert.ok(ccc === aaa.resolveNamespace('bbb:ccc')); + }); + + test('resolve task in sub-namespace by relative path', function () { + let curr = Namespace.ROOT_NAMESPACE.resolveNamespace('zooby'); + let task = curr.resolveTask('frang:w00t:bar'); + assert.ok(task.action.toString().indexOf('zooby:frang:w00t:bar') > -1); + }); + + test('prefer local to top-level', function () { + let curr = Namespace.ROOT_NAMESPACE.resolveNamespace('zooby:frang:w00t'); + let task = curr.resolveTask('bar'); + assert.ok(task.action.toString().indexOf('zooby:frang:w00t:bar') > -1); + }); + + test('does resolve top-level', function () { + let curr = Namespace.ROOT_NAMESPACE.resolveNamespace('zooby:frang:w00t'); + let task = curr.resolveTask('foo'); + assert.ok(task.action.toString().indexOf('top-level foo') > -1); + }); + + test('absolute lookup works from sub-namespaces', function () { + let curr = Namespace.ROOT_NAMESPACE.resolveNamespace('hurr:durr'); + let task = curr.resolveTask('zooby:frang:w00t:bar'); + assert.ok(task.action.toString().indexOf('zooby:frang:w00t:bar') > -1); + }); + + test('resolution miss with throw error', function () { + let curr = Namespace.ROOT_NAMESPACE; + let task = curr.resolveTask('asdf:qwer'); + assert.ok(!task); + }); + +}); diff --git a/node_modules/jake/test/unit/parseargs.js b/node_modules/jake/test/unit/parseargs.js new file mode 100644 index 0000000..7a3ddd5 --- /dev/null +++ b/node_modules/jake/test/unit/parseargs.js @@ -0,0 +1,169 @@ +/* + * Jake JavaScript build tool + * Copyright 2112 Matthew Eernisse (mde@fleegix.org) + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * +*/ + +const PROJECT_DIR = process.env.PROJECT_DIR; + +let parseargs = require(`${PROJECT_DIR}/lib/parseargs`); +let assert = require('assert'); +let optsReg = [ + { full: 'directory', + abbr: 'C', + preempts: false, + expectValue: true + }, + { full: 'jakefile', + abbr: 'f', + preempts: false, + expectValue: true + }, + { full: 'tasks', + abbr: 'T', + preempts: true + }, + { full: 'tasks', + abbr: 'ls', + preempts: true + }, + { full: 'trace', + abbr: 't', + preempts: false, + expectValue: false + }, + { full: 'help', + abbr: 'h', + preempts: true + }, + { full: 'version', + abbr: 'V', + preempts: true + } +]; +let p = new parseargs.Parser(optsReg); +let z = function (s) { return s.split(' '); }; +let res; + +suite('parseargs', function () { + + test('long preemptive opt and val with equal-sign, ignore further opts', function () { + res = p.parse(z('--tasks=foo --jakefile=asdf')); + assert.equal('foo', res.opts.tasks); + assert.equal(undefined, res.opts.jakefile); + }); + + test('long preemptive opt and val without equal-sign, ignore further opts', function () { + res = p.parse(z('--tasks foo --jakefile=asdf')); + assert.equal('foo', res.opts.tasks); + assert.equal(undefined, res.opts.jakefile); + }); + + test('long preemptive opt and no val, ignore further opts', function () { + res = p.parse(z('--tasks --jakefile=asdf')); + assert.equal(true, res.opts.tasks); + assert.equal(undefined, res.opts.jakefile); + }); + + test('preemptive opt with no val, should be true', function () { + res = p.parse(z('-T')); + assert.equal(true, res.opts.tasks); + }); + + test('preemptive opt with no val, should be true and ignore further opts', function () { + res = p.parse(z('-T -f')); + assert.equal(true, res.opts.tasks); + assert.equal(undefined, res.opts.jakefile); + }); + + test('preemptive opt with val, should be val', function () { + res = p.parse(z('-T zoobie -f foo/bar/baz')); + assert.equal('zoobie', res.opts.tasks); + assert.equal(undefined, res.opts.jakefile); + }); + + test('-f expects a value, -t does not (howdy is task-name)', function () { + res = p.parse(z('-f zoobie -t howdy')); + assert.equal('zoobie', res.opts.jakefile); + assert.equal(true, res.opts.trace); + assert.equal('howdy', res.taskNames[0]); + }); + + test('different order, -f expects a value, -t does not (howdy is task-name)', function () { + res = p.parse(z('-f zoobie howdy -t')); + assert.equal('zoobie', res.opts.jakefile); + assert.equal(true, res.opts.trace); + assert.equal('howdy', res.taskNames[0]); + }); + + test('-f expects a value, -t does not (foo=bar is env var)', function () { + res = p.parse(z('-f zoobie -t foo=bar')); + assert.equal('zoobie', res.opts.jakefile); + assert.equal(true, res.opts.trace); + assert.equal('bar', res.envVars.foo); + assert.equal(undefined, res.taskNames[0]); + }); + + test('-f expects a value, -t does not (foo=bar is env-var, task-name follows)', function () { + res = p.parse(z('-f zoobie -t howdy foo=bar')); + assert.equal('zoobie', res.opts.jakefile); + assert.equal(true, res.opts.trace); + assert.equal('bar', res.envVars.foo); + assert.equal('howdy', res.taskNames[0]); + }); + + test('-t does not expect a value, -f does (howdy is task-name)', function () { + res = p.parse(z('-t howdy -f zoobie')); + assert.equal(true, res.opts.trace); + assert.equal('zoobie', res.opts.jakefile); + assert.equal('howdy', res.taskNames[0]); + }); + + test('--trace does not expect a value, -f does (howdy is task-name)', function () { + res = p.parse(z('--trace howdy --jakefile zoobie')); + 
assert.equal(true, res.opts.trace); + assert.equal('zoobie', res.opts.jakefile); + assert.equal('howdy', res.taskNames[0]); + }); + + test('--trace does not expect a value (equal), -f does (throw howdy away)', function () { + res = p.parse(z('--trace=howdy --jakefile=zoobie')); + assert.equal(true, res.opts.trace); + assert.equal('zoobie', res.opts.jakefile); + assert.equal(undefined, res.taskNames[0]); + }); + + /* +, test('task-name with positional args', function () { + res = p.parse(z('foo:bar[asdf,qwer]')); + assert.equal('asdf', p.taskArgs[0]); + assert.equal('qwer', p.taskArgs[1]); + } + +, test('opts, env vars, task-name with positional args', function () { + res = p.parse(z('-f ./tests/Jakefile -t default[asdf,qwer] foo=bar')); + assert.equal('./tests/Jakefile', res.opts.jakefile); + assert.equal(true, res.opts.trace); + assert.equal('bar', res.envVars.foo); + assert.equal('default', res.taskName); + assert.equal('asdf', p.taskArgs[0]); + assert.equal('qwer', p.taskArgs[1]); + } +*/ + + +}); + + diff --git a/node_modules/jake/usage.txt b/node_modules/jake/usage.txt new file mode 100644 index 0000000..392b6d8 --- /dev/null +++ b/node_modules/jake/usage.txt @@ -0,0 +1,16 @@ +Jake JavaScript build tool +******************************************************************************** +If no flags are given, Jake looks for a Jakefile or Jakefile.js in the current directory. +******************************************************************************** +{Usage}: jake [options ...] [env variables ...] target + +{Options}: + -f, --jakefile FILE Use FILE as the Jakefile. + -C, --directory DIRECTORY Change to DIRECTORY before running tasks. + -B, --always-make Unconditionally make all targets. + -T/ls, --tasks Display the tasks (matching optional PATTERN) with descriptions, then exit. + -J, --jakelibdir JAKELIBDIR Auto-import any .jake files in JAKELIBDIR. (default is \'jakelib\') + -h, --help Display this help message. + -V/v, --version Display the Jake version. + -ar, --allow-rejection Keep running even after unhandled promise rejection + diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 0000000..ad72b81 --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,209 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://secure.travis-ci.org/isaacs/minimatch.svg)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. 
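A quick way to see that conversion is `minimatch.makeRe`, documented further down in this README. A minimal sketch (the exact `RegExp` source may differ between versions):

```js
var minimatch = require("minimatch")

var re = minimatch.makeRe("*.js")
// roughly /^(?:(?!\.)(?=.)[^/]*?\.js)$/, or false for an invalid pattern

re.test("index.js")  // true
re.test("lib/a.js")  // false, a single "*" never crosses "/"
```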
+ +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. 
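A small sketch of the `dot` behaviour described above:

```js
var minimatch = require("minimatch")

minimatch("a/.d/b", "a/**/b")                // false, ".d" is treated as hidden
minimatch("a/.d/b", "a/**/b", { dot: true }) // true
minimatch(".profile", "*")                   // false
minimatch(".profile", "*", { dot: true })    // true
```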
+ +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..5b5f8cf --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,923 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = require('path') +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. 
Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? 
'(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:<pattern>)<type> + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. 
are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 0000000..c4514c8 --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,30 @@ +{ + "author": "Isaac Z. 
Schlueter <i@izs.me> (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.0.4", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap test/*.js --cov", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^10.3.2" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/supports-color/browser.js b/node_modules/supports-color/browser.js new file mode 100644 index 0000000..62afa3a --- /dev/null +++ b/node_modules/supports-color/browser.js @@ -0,0 +1,5 @@ +'use strict'; +module.exports = { + stdout: false, + stderr: false +}; diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js new file mode 100644 index 0000000..1704131 --- /dev/null +++ b/node_modules/supports-color/index.js @@ -0,0 +1,131 @@ +'use strict'; +const os = require('os'); +const hasFlag = require('has-flag'); + +const env = process.env; + +let forceColor; +if (hasFlag('no-color') || + hasFlag('no-colors') || + hasFlag('color=false')) { + forceColor = false; +} else if (hasFlag('color') || + hasFlag('colors') || + hasFlag('color=true') || + hasFlag('color=always')) { + forceColor = true; +} +if ('FORCE_COLOR' in env) { + forceColor = env.FORCE_COLOR.length === 0 || parseInt(env.FORCE_COLOR, 10) !== 0; +} + +function translateLevel(level) { + if (level === 0) { + return false; + } + + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} + +function supportsColor(stream) { + if (forceColor === false) { + return 0; + } + + if (hasFlag('color=16m') || + hasFlag('color=full') || + hasFlag('color=truecolor')) { + return 3; + } + + if (hasFlag('color=256')) { + return 2; + } + + if (stream && !stream.isTTY && forceColor !== true) { + return 0; + } + + const min = forceColor ? 1 : 0; + + if (process.platform === 'win32') { + // Node.js 7.5.0 is the first version of Node.js to include a patch to + // libuv that enables 256 color output on Windows. Anything earlier and it + // won't work. However, here we target Node.js 8 at minimum as it is an LTS + // release, and Node.js 7 is not. Windows 10 build 10586 is the first Windows + // release that supports 256 colors. Windows 10 build 14931 is the first release + // that supports 16m/TrueColor. + const osRelease = os.release().split('.'); + if ( + Number(process.versions.node.split('.')[0]) >= 8 && + Number(osRelease[0]) >= 10 && + Number(osRelease[2]) >= 10586 + ) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + + return 1; + } + + if ('CI' in env) { + if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') { + return 1; + } + + return min; + } + + if ('TEAMCITY_VERSION' in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + + if (env.COLORTERM === 'truecolor') { + return 3; + } + + if ('TERM_PROGRAM' in env) { + const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10); + + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + return version >= 3 ? 
3 : 2; + case 'Apple_Terminal': + return 2; + // No default + } + } + + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + + if ('COLORTERM' in env) { + return 1; + } + + if (env.TERM === 'dumb') { + return min; + } + + return min; +} + +function getSupportLevel(stream) { + const level = supportsColor(stream); + return translateLevel(level); +} + +module.exports = { + supportsColor: getSupportLevel, + stdout: getSupportLevel(process.stdout), + stderr: getSupportLevel(process.stderr) +}; diff --git a/node_modules/supports-color/license b/node_modules/supports-color/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/supports-color/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json new file mode 100644 index 0000000..ad199f5 --- /dev/null +++ b/node_modules/supports-color/package.json @@ -0,0 +1,53 @@ +{ + "name": "supports-color", + "version": "5.5.0", + "description": "Detect whether a terminal supports color", + "license": "MIT", + "repository": "chalk/supports-color", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js", + "browser.js" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "ansi", + "styles", + "tty", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "support", + "supports", + "capability", + "detect", + "truecolor", + "16m" + ], + "dependencies": { + "has-flag": "^3.0.0" + }, + "devDependencies": { + "ava": "^0.25.0", + "import-fresh": "^2.0.0", + "xo": "^0.20.0" + }, + "browser": "browser.js" +} diff --git a/node_modules/supports-color/readme.md b/node_modules/supports-color/readme.md new file mode 100644 index 0000000..f6e4019 --- /dev/null +++ b/node_modules/supports-color/readme.md @@ -0,0 +1,66 @@ +# supports-color [![Build Status](https://travis-ci.org/chalk/supports-color.svg?branch=master)](https://travis-ci.org/chalk/supports-color) + +> Detect whether a terminal supports color + + +## Install + +``` +$ npm install supports-color +``` + + +## Usage + +```js +const supportsColor = require('supports-color'); + +if (supportsColor.stdout) { + console.log('Terminal stdout supports color'); +} + +if (supportsColor.stdout.has256) { + console.log('Terminal stdout supports 256 colors'); +} + +if (supportsColor.stderr.has16m) { + console.log('Terminal stderr supports 16 million colors (truecolor)'); +} +``` + + +## API + +Returns an `Object` with a `stdout` and `stderr` property for testing either streams. Each property is an `Object`, or `false` if color is not supported. + +The `stdout`/`stderr` objects specifies a level of support for color through a `.level` property and a corresponding flag: + +- `.level = 1` and `.hasBasic = true`: Basic color support (16 colors) +- `.level = 2` and `.has256 = true`: 256 color support +- `.level = 3` and `.has16m = true`: Truecolor support (16 million colors) + + +## Info + +It obeys the `--color` and `--no-color` CLI flags. + +Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, add the environment variable `FORCE_COLOR=1` to forcefully enable color or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks. + +Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively. 
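A short, illustrative sketch of acting on the detected level (the escape sequences here are ordinary ANSI codes, not part of this package):

```js
const supportsColor = require('supports-color');

// .stdout is either false or { level, hasBasic, has256, has16m };
// FORCE_COLOR and the --color/--no-color flags override the detection, as noted above.
const level = supportsColor.stdout ? supportsColor.stdout.level : 0;

if (level >= 2) {
	console.log('\u001B[38;5;208m256-color output\u001B[39m');
} else if (level >= 1) {
	console.log('\u001B[33mbasic 16-color output\u001B[39m');
} else {
	console.log('plain output');
}
```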
+ + +## Related + +- [supports-color-cli](https://github.com/chalk/supports-color-cli) - CLI for this module +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +## License + +MIT diff --git a/node_modules/yaml/LICENSE b/node_modules/yaml/LICENSE new file mode 100644 index 0000000..7ca44e3 --- /dev/null +++ b/node_modules/yaml/LICENSE @@ -0,0 +1,13 @@ +Copyright 2018 Eemeli Aro <eemeli@gmail.com> + +Permission to use, copy, modify, and/or distribute this software for any purpose +with or without fee is hereby granted, provided that the above copyright notice +and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF +THIS SOFTWARE. diff --git a/node_modules/yaml/README.md b/node_modules/yaml/README.md new file mode 100644 index 0000000..2c51aea --- /dev/null +++ b/node_modules/yaml/README.md @@ -0,0 +1,127 @@ +# YAML <a href="https://www.npmjs.com/package/yaml"><img align="right" src="https://badge.fury.io/js/yaml.svg" title="npm package" /></a> + +`yaml` is a JavaScript parser and stringifier for [YAML](http://yaml.org/), a human friendly data serialization standard. It supports both parsing and stringifying data using all versions of YAML, along with all common data schemas. As a particularly distinguishing feature, `yaml` fully supports reading and writing comments and blank lines in YAML documents. + +The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/). It has no external dependencies and runs on Node.js 6 and later, and in browsers from IE 11 upwards. + +For the purposes of versioning, any changes that break any of the endpoints or APIs documented here will be considered semver-major breaking changes. Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed). + +For more information, see the project's documentation site: [**eemeli.org/yaml/v1**](https://eemeli.org/yaml/v1/) + +To install: + +```sh +npm install yaml +``` + +**Note:** This is `yaml@1`. You may also be interested in the next version, currently available as [`yaml@next`](https://www.npmjs.com/package/yaml/v/next). + +## API Overview + +The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/v1/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the [CST Parser](https://eemeli.org/yaml/#cst-parser). The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third is the closest to YAML source, making it fast, raw, and crude. 
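As a rough, indicative sketch of how the three layers differ on the same input (entry points as listed below, outputs abbreviated):

```js
import YAML from 'yaml'
import parseCST from 'yaml/parse-cst'

const src = '# a comment\ngreeting: hello\n'

// Layer 1: plain JavaScript values, comments are dropped
YAML.parse(src)             // { greeting: 'hello' }

// Layer 2: a Document exposing errors, anchors and content nodes
const doc = YAML.parseDocument(src)
doc.errors                  // []
doc.toJSON()                // { greeting: 'hello' }
String(doc)                 // re-stringifies, keeping the comment

// Layer 3: the raw CST, closest to the YAML source text
parseCST(src).length        // 1, one CST document
```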
+ +```js +import YAML from 'yaml' +// or +const YAML = require('yaml') +``` + +### Parse & Stringify + +- [`YAML.parse(str, options): value`](https://eemeli.org/yaml/v1/#yaml-parse) +- [`YAML.stringify(value, options): string`](https://eemeli.org/yaml/v1/#yaml-stringify) + +### YAML Documents + +- [`YAML.createNode(value, wrapScalars, tag): Node`](https://eemeli.org/yaml/v1/#creating-nodes) +- [`YAML.defaultOptions`](https://eemeli.org/yaml/v1/#options) +- [`YAML.Document`](https://eemeli.org/yaml/v1/#yaml-documents) + - [`constructor(options)`](https://eemeli.org/yaml/v1/#creating-documents) + - [`defaults`](https://eemeli.org/yaml/v1/#options) + - [`#anchors`](https://eemeli.org/yaml/v1/#working-with-anchors) + - [`#contents`](https://eemeli.org/yaml/v1/#content-nodes) + - [`#errors`](https://eemeli.org/yaml/v1/#errors) +- [`YAML.parseAllDocuments(str, options): YAML.Document[]`](https://eemeli.org/yaml/v1/#parsing-documents) +- [`YAML.parseDocument(str, options): YAML.Document`](https://eemeli.org/yaml/v1/#parsing-documents) + +```js +import { Pair, YAMLMap, YAMLSeq } from 'yaml/types' +``` + +- [`new Pair(key, value)`](https://eemeli.org/yaml/v1/#creating-nodes) +- [`new YAMLMap()`](https://eemeli.org/yaml/v1/#creating-nodes) +- [`new YAMLSeq()`](https://eemeli.org/yaml/v1/#creating-nodes) + +### CST Parser + +```js +import parseCST from 'yaml/parse-cst' +``` + +- [`parseCST(str): CSTDocument[]`](https://eemeli.org/yaml/v1/#parsecst) +- [`YAML.parseCST(str): CSTDocument[]`](https://eemeli.org/yaml/v1/#parsecst) + +## YAML.parse + +```yaml +# file.yml +YAML: + - A human-readable data serialization language + - https://en.wikipedia.org/wiki/YAML +yaml: + - A complete JavaScript implementation + - https://www.npmjs.com/package/yaml +``` + +```js +import fs from 'fs' +import YAML from 'yaml' + +YAML.parse('3.14159') +// 3.14159 + +YAML.parse('[ true, false, maybe, null ]\n') +// [ true, false, 'maybe', null ] + +const file = fs.readFileSync('./file.yml', 'utf8') +YAML.parse(file) +// { YAML: +// [ 'A human-readable data serialization language', +// 'https://en.wikipedia.org/wiki/YAML' ], +// yaml: +// [ 'A complete JavaScript implementation', +// 'https://www.npmjs.com/package/yaml' ] } +``` + +## YAML.stringify + +```js +import YAML from 'yaml' + +YAML.stringify(3.14159) +// '3.14159\n' + +YAML.stringify([true, false, 'maybe', null]) +// `- true +// - false +// - maybe +// - null +// ` + +YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' }) +// `number: 3 +// plain: string +// block: > +// two +// +// lines +// ` +``` + +--- + +Browser testing provided by: + +<a href="https://www.browserstack.com/open-source"> +<img width=200 src="https://eemeli.org/yaml/images/browserstack.svg" /> +</a> diff --git a/node_modules/yaml/browser/dist/PlainValue-b8036b75.js b/node_modules/yaml/browser/dist/PlainValue-b8036b75.js new file mode 100644 index 0000000..2a4255a --- /dev/null +++ b/node_modules/yaml/browser/dist/PlainValue-b8036b75.js @@ -0,0 +1,1275 @@ +function _typeof(obj) { + "@babel/helpers - typeof"; + + if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") { + _typeof = function (obj) { + return typeof obj; + }; + } else { + _typeof = function (obj) { + return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? 
"symbol" : typeof obj; + }; + } + + return _typeof(obj); +} + +function _classCallCheck(instance, Constructor) { + if (!(instance instanceof Constructor)) { + throw new TypeError("Cannot call a class as a function"); + } +} + +function _defineProperties(target, props) { + for (var i = 0; i < props.length; i++) { + var descriptor = props[i]; + descriptor.enumerable = descriptor.enumerable || false; + descriptor.configurable = true; + if ("value" in descriptor) descriptor.writable = true; + Object.defineProperty(target, descriptor.key, descriptor); + } +} + +function _createClass(Constructor, protoProps, staticProps) { + if (protoProps) _defineProperties(Constructor.prototype, protoProps); + if (staticProps) _defineProperties(Constructor, staticProps); + return Constructor; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +function _inherits(subClass, superClass) { + if (typeof superClass !== "function" && superClass !== null) { + throw new TypeError("Super expression must either be null or a function"); + } + + subClass.prototype = Object.create(superClass && superClass.prototype, { + constructor: { + value: subClass, + writable: true, + configurable: true + } + }); + if (superClass) _setPrototypeOf(subClass, superClass); +} + +function _getPrototypeOf(o) { + _getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) { + return o.__proto__ || Object.getPrototypeOf(o); + }; + return _getPrototypeOf(o); +} + +function _setPrototypeOf(o, p) { + _setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) { + o.__proto__ = p; + return o; + }; + + return _setPrototypeOf(o, p); +} + +function _isNativeReflectConstruct() { + if (typeof Reflect === "undefined" || !Reflect.construct) return false; + if (Reflect.construct.sham) return false; + if (typeof Proxy === "function") return true; + + try { + Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); + return true; + } catch (e) { + return false; + } +} + +function _construct(Parent, args, Class) { + if (_isNativeReflectConstruct()) { + _construct = Reflect.construct; + } else { + _construct = function _construct(Parent, args, Class) { + var a = [null]; + a.push.apply(a, args); + var Constructor = Function.bind.apply(Parent, a); + var instance = new Constructor(); + if (Class) _setPrototypeOf(instance, Class.prototype); + return instance; + }; + } + + return _construct.apply(null, arguments); +} + +function _isNativeFunction(fn) { + return Function.toString.call(fn).indexOf("[native code]") !== -1; +} + +function _wrapNativeSuper(Class) { + var _cache = typeof Map === "function" ? 
new Map() : undefined; + + _wrapNativeSuper = function _wrapNativeSuper(Class) { + if (Class === null || !_isNativeFunction(Class)) return Class; + + if (typeof Class !== "function") { + throw new TypeError("Super expression must either be null or a function"); + } + + if (typeof _cache !== "undefined") { + if (_cache.has(Class)) return _cache.get(Class); + + _cache.set(Class, Wrapper); + } + + function Wrapper() { + return _construct(Class, arguments, _getPrototypeOf(this).constructor); + } + + Wrapper.prototype = Object.create(Class.prototype, { + constructor: { + value: Wrapper, + enumerable: false, + writable: true, + configurable: true + } + }); + return _setPrototypeOf(Wrapper, Class); + }; + + return _wrapNativeSuper(Class); +} + +function _assertThisInitialized(self) { + if (self === void 0) { + throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); + } + + return self; +} + +function _possibleConstructorReturn(self, call) { + if (call && (typeof call === "object" || typeof call === "function")) { + return call; + } + + return _assertThisInitialized(self); +} + +function _createSuper(Derived) { + var hasNativeReflectConstruct = _isNativeReflectConstruct(); + + return function _createSuperInternal() { + var Super = _getPrototypeOf(Derived), + result; + + if (hasNativeReflectConstruct) { + var NewTarget = _getPrototypeOf(this).constructor; + + result = Reflect.construct(Super, arguments, NewTarget); + } else { + result = Super.apply(this, arguments); + } + + return _possibleConstructorReturn(this, result); + }; +} + +function _superPropBase(object, property) { + while (!Object.prototype.hasOwnProperty.call(object, property)) { + object = _getPrototypeOf(object); + if (object === null) break; + } + + return object; +} + +function _get(target, property, receiver) { + if (typeof Reflect !== "undefined" && Reflect.get) { + _get = Reflect.get; + } else { + _get = function _get(target, property, receiver) { + var base = _superPropBase(target, property); + + if (!base) return; + var desc = Object.getOwnPropertyDescriptor(base, property); + + if (desc.get) { + return desc.get.call(receiver); + } + + return desc.value; + }; + } + + return _get(target, property, receiver || target); +} + +function _slicedToArray(arr, i) { + return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest(); +} + +function _toArray(arr) { + return _arrayWithHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableRest(); +} + +function _arrayWithHoles(arr) { + if (Array.isArray(arr)) return arr; +} + +function _iterableToArray(iter) { + if (typeof Symbol !== "undefined" && Symbol.iterator in Object(iter)) return Array.from(iter); +} + +function _iterableToArrayLimit(arr, i) { + if (typeof Symbol === "undefined" || !(Symbol.iterator in Object(arr))) return; + var _arr = []; + var _n = true; + var _d = false; + var _e = undefined; + + try { + for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { + _arr.push(_s.value); + + if (i && _arr.length === i) break; + } + } catch (err) { + _d = true; + _e = err; + } finally { + try { + if (!_n && _i["return"] != null) _i["return"](); + } finally { + if (_d) throw _e; + } + } + + return _arr; +} + +function _unsupportedIterableToArray(o, minLen) { + if (!o) return; + if (typeof o === "string") return _arrayLikeToArray(o, minLen); + var n = Object.prototype.toString.call(o).slice(8, -1); + if (n === "Object" && o.constructor) n 
= o.constructor.name; + if (n === "Map" || n === "Set") return Array.from(o); + if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); +} + +function _arrayLikeToArray(arr, len) { + if (len == null || len > arr.length) len = arr.length; + + for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; + + return arr2; +} + +function _nonIterableRest() { + throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); +} + +function _createForOfIteratorHelper(o, allowArrayLike) { + var it; + + if (typeof Symbol === "undefined" || o[Symbol.iterator] == null) { + if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { + if (it) o = it; + var i = 0; + + var F = function () {}; + + return { + s: F, + n: function () { + if (i >= o.length) return { + done: true + }; + return { + done: false, + value: o[i++] + }; + }, + e: function (e) { + throw e; + }, + f: F + }; + } + + throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); + } + + var normalCompletion = true, + didErr = false, + err; + return { + s: function () { + it = o[Symbol.iterator](); + }, + n: function () { + var step = it.next(); + normalCompletion = step.done; + return step; + }, + e: function (e) { + didErr = true; + err = e; + }, + f: function () { + try { + if (!normalCompletion && it.return != null) it.return(); + } finally { + if (didErr) throw err; + } + } + }; +} + +var Char = { + ANCHOR: '&', + COMMENT: '#', + TAG: '!', + DIRECTIVES_END: '-', + DOCUMENT_END: '.' +}; +var Type = { + ALIAS: 'ALIAS', + BLANK_LINE: 'BLANK_LINE', + BLOCK_FOLDED: 'BLOCK_FOLDED', + BLOCK_LITERAL: 'BLOCK_LITERAL', + COMMENT: 'COMMENT', + DIRECTIVE: 'DIRECTIVE', + DOCUMENT: 'DOCUMENT', + FLOW_MAP: 'FLOW_MAP', + FLOW_SEQ: 'FLOW_SEQ', + MAP: 'MAP', + MAP_KEY: 'MAP_KEY', + MAP_VALUE: 'MAP_VALUE', + PLAIN: 'PLAIN', + QUOTE_DOUBLE: 'QUOTE_DOUBLE', + QUOTE_SINGLE: 'QUOTE_SINGLE', + SEQ: 'SEQ', + SEQ_ITEM: 'SEQ_ITEM' +}; +var defaultTagPrefix = 'tag:yaml.org,2002:'; +var defaultTags = { + MAP: 'tag:yaml.org,2002:map', + SEQ: 'tag:yaml.org,2002:seq', + STR: 'tag:yaml.org,2002:str' +}; + +function findLineStarts(src) { + var ls = [0]; + var offset = src.indexOf('\n'); + + while (offset !== -1) { + offset += 1; + ls.push(offset); + offset = src.indexOf('\n', offset); + } + + return ls; +} + +function getSrcInfo(cst) { + var lineStarts, src; + + if (typeof cst === 'string') { + lineStarts = findLineStarts(cst); + src = cst; + } else { + if (Array.isArray(cst)) cst = cst[0]; + + if (cst && cst.context) { + if (!cst.lineStarts) cst.lineStarts = findLineStarts(cst.context.src); + lineStarts = cst.lineStarts; + src = cst.context.src; + } + } + + return { + lineStarts: lineStarts, + src: src + }; +} +/** + * @typedef {Object} LinePos - One-indexed position in the source + * @property {number} line + * @property {number} col + */ + +/** + * Determine the line/col position matching a character offset. + * + * Accepts a source string or a CST document as the second parameter. With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns a one-indexed `{ line, col }` location if found, or + * `undefined` otherwise. 
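+ * For example, getLinePos(5, 'ab\ncd\n') returns { line: 2, col: 3 }, i.e. the
+ * trailing newline of the second line.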
+ * + * @param {number} offset + * @param {string|Document|Document[]} cst + * @returns {?LinePos} + */ + + +function getLinePos(offset, cst) { + if (typeof offset !== 'number' || offset < 0) return null; + + var _getSrcInfo = getSrcInfo(cst), + lineStarts = _getSrcInfo.lineStarts, + src = _getSrcInfo.src; + + if (!lineStarts || !src || offset > src.length) return null; + + for (var i = 0; i < lineStarts.length; ++i) { + var start = lineStarts[i]; + + if (offset < start) { + return { + line: i, + col: offset - lineStarts[i - 1] + 1 + }; + } + + if (offset === start) return { + line: i + 1, + col: 1 + }; + } + + var line = lineStarts.length; + return { + line: line, + col: offset - lineStarts[line - 1] + 1 + }; +} +/** + * Get a specified line from the source. + * + * Accepts a source string or a CST document as the second parameter. With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns the line as a string if found, or `null` otherwise. + * + * @param {number} line One-indexed line number + * @param {string|Document|Document[]} cst + * @returns {?string} + */ + +function getLine(line, cst) { + var _getSrcInfo2 = getSrcInfo(cst), + lineStarts = _getSrcInfo2.lineStarts, + src = _getSrcInfo2.src; + + if (!lineStarts || !(line >= 1) || line > lineStarts.length) return null; + var start = lineStarts[line - 1]; + var end = lineStarts[line]; // undefined for last line; that's ok for slice() + + while (end && end > start && src[end - 1] === '\n') { + --end; + } + + return src.slice(start, end); +} +/** + * Pretty-print the starting line from the source indicated by the range `pos` + * + * Trims output to `maxWidth` chars while keeping the starting column visible, + * using `…` at either end to indicate dropped characters. + * + * Returns a two-line string (or `null`) with `\n` as separator; the second line + * will hold appropriately indented `^` marks indicating the column range. + * + * @param {Object} pos + * @param {LinePos} pos.start + * @param {LinePos} [pos.end] + * @param {string|Document|Document[]*} cst + * @param {number} [maxWidth=80] + * @returns {?string} + */ + +function getPrettyContext(_ref, cst) { + var start = _ref.start, + end = _ref.end; + var maxWidth = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 80; + var src = getLine(start.line, cst); + if (!src) return null; + var col = start.col; + + if (src.length > maxWidth) { + if (col <= maxWidth - 10) { + src = src.substr(0, maxWidth - 1) + '…'; + } else { + var halfWidth = Math.round(maxWidth / 2); + if (src.length > col + halfWidth) src = src.substr(0, col + halfWidth - 1) + '…'; + col -= src.length - maxWidth; + src = '…' + src.substr(1 - maxWidth); + } + } + + var errLen = 1; + var errEnd = ''; + + if (end) { + if (end.line === start.line && col + (end.col - start.col) <= maxWidth + 1) { + errLen = end.col - start.col; + } else { + errLen = Math.min(src.length + 1, maxWidth) - col; + errEnd = '…'; + } + } + + var offset = col > 1 ? 
' '.repeat(col - 1) : ''; + var err = '^'.repeat(errLen); + return "".concat(src, "\n").concat(offset).concat(err).concat(errEnd); +} + +var Range = /*#__PURE__*/function () { + function Range(start, end) { + _classCallCheck(this, Range); + + this.start = start; + this.end = end || start; + } + + _createClass(Range, [{ + key: "isEmpty", + value: function isEmpty() { + return typeof this.start !== 'number' || !this.end || this.end <= this.start; + } + /** + * Set `origStart` and `origEnd` to point to the original source range for + * this node, which may differ due to dropped CR characters. + * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + }, { + key: "setOrigRange", + value: function setOrigRange(cr, offset) { + var start = this.start, + end = this.end; + + if (cr.length === 0 || end <= cr[0]) { + this.origStart = start; + this.origEnd = end; + return offset; + } + + var i = offset; + + while (i < cr.length) { + if (cr[i] > start) break;else ++i; + } + + this.origStart = start + i; + var nextOffset = i; + + while (i < cr.length) { + // if end was at \n, it should now be at \r + if (cr[i] >= end) break;else ++i; + } + + this.origEnd = end + i; + return nextOffset; + } + }], [{ + key: "copy", + value: function copy(orig) { + return new Range(orig.start, orig.end); + } + }]); + + return Range; +}(); + +/** Root class of all nodes */ + +var Node = /*#__PURE__*/function () { + function Node(type, props, context) { + _classCallCheck(this, Node); + + Object.defineProperty(this, 'context', { + value: context || null, + writable: true + }); + this.error = null; + this.range = null; + this.valueRange = null; + this.props = props || []; + this.type = type; + this.value = null; + } + + _createClass(Node, [{ + key: "getPropValue", + value: function getPropValue(idx, key, skipKey) { + if (!this.context) return null; + var src = this.context.src; + var prop = this.props[idx]; + return prop && src[prop.start] === key ? src.slice(prop.start + (skipKey ? 1 : 0), prop.end) : null; + } + }, { + key: "anchor", + get: function get() { + for (var i = 0; i < this.props.length; ++i) { + var anchor = this.getPropValue(i, Char.ANCHOR, true); + if (anchor != null) return anchor; + } + + return null; + } + }, { + key: "comment", + get: function get() { + var comments = []; + + for (var i = 0; i < this.props.length; ++i) { + var comment = this.getPropValue(i, Char.COMMENT, true); + if (comment != null) comments.push(comment); + } + + return comments.length > 0 ? 
comments.join('\n') : null; + } + }, { + key: "commentHasRequiredWhitespace", + value: function commentHasRequiredWhitespace(start) { + var src = this.context.src; + if (this.header && start === this.header.end) return false; + if (!this.valueRange) return false; + var end = this.valueRange.end; + return start !== end || Node.atBlank(src, end - 1); + } + }, { + key: "hasComment", + get: function get() { + if (this.context) { + var src = this.context.src; + + for (var i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] === Char.COMMENT) return true; + } + } + + return false; + } + }, { + key: "hasProps", + get: function get() { + if (this.context) { + var src = this.context.src; + + for (var i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] !== Char.COMMENT) return true; + } + } + + return false; + } + }, { + key: "includesTrailingLines", + get: function get() { + return false; + } + }, { + key: "jsonLike", + get: function get() { + var jsonLikeTypes = [Type.FLOW_MAP, Type.FLOW_SEQ, Type.QUOTE_DOUBLE, Type.QUOTE_SINGLE]; + return jsonLikeTypes.indexOf(this.type) !== -1; + } + }, { + key: "rangeAsLinePos", + get: function get() { + if (!this.range || !this.context) return undefined; + var start = getLinePos(this.range.start, this.context.root); + if (!start) return undefined; + var end = getLinePos(this.range.end, this.context.root); + return { + start: start, + end: end + }; + } + }, { + key: "rawValue", + get: function get() { + if (!this.valueRange || !this.context) return null; + var _this$valueRange = this.valueRange, + start = _this$valueRange.start, + end = _this$valueRange.end; + return this.context.src.slice(start, end); + } + }, { + key: "tag", + get: function get() { + for (var i = 0; i < this.props.length; ++i) { + var tag = this.getPropValue(i, Char.TAG, false); + + if (tag != null) { + if (tag[1] === '<') { + return { + verbatim: tag.slice(2, -1) + }; + } else { + // eslint-disable-next-line no-unused-vars + var _tag$match = tag.match(/^(.*!)([^!]*)$/), + _tag$match2 = _slicedToArray(_tag$match, 3); + _tag$match2[0]; + var handle = _tag$match2[1], + suffix = _tag$match2[2]; + + return { + handle: handle, + suffix: suffix + }; + } + } + } + + return null; + } + }, { + key: "valueRangeContainsNewline", + get: function get() { + if (!this.valueRange || !this.context) return false; + var _this$valueRange2 = this.valueRange, + start = _this$valueRange2.start, + end = _this$valueRange2.end; + var src = this.context.src; + + for (var i = start; i < end; ++i) { + if (src[i] === '\n') return true; + } + + return false; + } + }, { + key: "parseComment", + value: function parseComment(start) { + var src = this.context.src; + + if (src[start] === Char.COMMENT) { + var end = Node.endOfLine(src, start + 1); + var commentRange = new Range(start, end); + this.props.push(commentRange); + return end; + } + + return start; + } + /** + * Populates the `origStart` and `origEnd` values of all ranges for this + * node. Extended by child classes to handle descendant nodes. 
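+     * For example, with cr = [4, 9] a range { start: 6, end: 8 } is assigned
+     * origStart 7 and origEnd 9, as exactly one dropped CR precedes the range.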
+ * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + if (this.range) offset = this.range.setOrigRange(cr, offset); + if (this.valueRange) this.valueRange.setOrigRange(cr, offset); + this.props.forEach(function (prop) { + return prop.setOrigRange(cr, offset); + }); + return offset; + } + }, { + key: "toString", + value: function toString() { + var src = this.context.src, + range = this.range, + value = this.value; + if (value != null) return value; + var str = src.slice(range.start, range.end); + return Node.addStringTerminator(src, range.end, str); + } + }], [{ + key: "addStringTerminator", + value: function addStringTerminator(src, offset, str) { + if (str[str.length - 1] === '\n') return str; + var next = Node.endOfWhiteSpace(src, offset); + return next >= src.length || src[next] === '\n' ? str + '\n' : str; + } // ^(---|...) + + }, { + key: "atDocumentBoundary", + value: function atDocumentBoundary(src, offset, sep) { + var ch0 = src[offset]; + if (!ch0) return true; + var prev = src[offset - 1]; + if (prev && prev !== '\n') return false; + + if (sep) { + if (ch0 !== sep) return false; + } else { + if (ch0 !== Char.DIRECTIVES_END && ch0 !== Char.DOCUMENT_END) return false; + } + + var ch1 = src[offset + 1]; + var ch2 = src[offset + 2]; + if (ch1 !== ch0 || ch2 !== ch0) return false; + var ch3 = src[offset + 3]; + return !ch3 || ch3 === '\n' || ch3 === '\t' || ch3 === ' '; + } + }, { + key: "endOfIdentifier", + value: function endOfIdentifier(src, offset) { + var ch = src[offset]; + var isVerbatim = ch === '<'; + var notOk = isVerbatim ? 
['\n', '\t', ' ', '>'] : ['\n', '\t', ' ', '[', ']', '{', '}', ',']; + + while (ch && notOk.indexOf(ch) === -1) { + ch = src[offset += 1]; + } + + if (isVerbatim && ch === '>') offset += 1; + return offset; + } + }, { + key: "endOfIndent", + value: function endOfIndent(src, offset) { + var ch = src[offset]; + + while (ch === ' ') { + ch = src[offset += 1]; + } + + return offset; + } + }, { + key: "endOfLine", + value: function endOfLine(src, offset) { + var ch = src[offset]; + + while (ch && ch !== '\n') { + ch = src[offset += 1]; + } + + return offset; + } + }, { + key: "endOfWhiteSpace", + value: function endOfWhiteSpace(src, offset) { + var ch = src[offset]; + + while (ch === '\t' || ch === ' ') { + ch = src[offset += 1]; + } + + return offset; + } + }, { + key: "startOfLine", + value: function startOfLine(src, offset) { + var ch = src[offset - 1]; + if (ch === '\n') return offset; + + while (ch && ch !== '\n') { + ch = src[offset -= 1]; + } + + return offset + 1; + } + /** + * End of indentation, or null if the line's indent level is not more + * than `indent` + * + * @param {string} src + * @param {number} indent + * @param {number} lineStart + * @returns {?number} + */ + + }, { + key: "endOfBlockIndent", + value: function endOfBlockIndent(src, indent, lineStart) { + var inEnd = Node.endOfIndent(src, lineStart); + + if (inEnd > lineStart + indent) { + return inEnd; + } else { + var wsEnd = Node.endOfWhiteSpace(src, inEnd); + var ch = src[wsEnd]; + if (!ch || ch === '\n') return wsEnd; + } + + return null; + } + }, { + key: "atBlank", + value: function atBlank(src, offset, endAsBlank) { + var ch = src[offset]; + return ch === '\n' || ch === '\t' || ch === ' ' || endAsBlank && !ch; + } + }, { + key: "nextNodeIsIndented", + value: function nextNodeIsIndented(ch, indentDiff, indicatorAsIndent) { + if (!ch || indentDiff < 0) return false; + if (indentDiff > 0) return true; + return indicatorAsIndent && ch === '-'; + } // should be at line or string end, or at next non-whitespace char + + }, { + key: "normalizeOffset", + value: function normalizeOffset(src, offset) { + var ch = src[offset]; + return !ch ? offset : ch !== '\n' && src[offset - 1] === '\n' ? 
offset - 1 : Node.endOfWhiteSpace(src, offset); + } // fold single newline into space, multiple newlines to N - 1 newlines + // presumes src[offset] === '\n' + + }, { + key: "foldNewline", + value: function foldNewline(src, offset, indent) { + var inCount = 0; + var error = false; + var fold = ''; + var ch = src[offset + 1]; + + while (ch === ' ' || ch === '\t' || ch === '\n') { + switch (ch) { + case '\n': + inCount = 0; + offset += 1; + fold += '\n'; + break; + + case '\t': + if (inCount <= indent) error = true; + offset = Node.endOfWhiteSpace(src, offset + 2) - 1; + break; + + case ' ': + inCount += 1; + offset += 1; + break; + } + + ch = src[offset + 1]; + } + + if (!fold) fold = ' '; + if (ch && inCount <= indent) error = true; + return { + fold: fold, + offset: offset, + error: error + }; + } + }]); + + return Node; +}(); + +var YAMLError = /*#__PURE__*/function (_Error) { + _inherits(YAMLError, _Error); + + var _super = _createSuper(YAMLError); + + function YAMLError(name, source, message) { + var _this; + + _classCallCheck(this, YAMLError); + + if (!message || !(source instanceof Node)) throw new Error("Invalid arguments for new ".concat(name)); + _this = _super.call(this); + _this.name = name; + _this.message = message; + _this.source = source; + return _this; + } + + _createClass(YAMLError, [{ + key: "makePretty", + value: function makePretty() { + if (!this.source) return; + this.nodeType = this.source.type; + var cst = this.source.context && this.source.context.root; + + if (typeof this.offset === 'number') { + this.range = new Range(this.offset, this.offset + 1); + var start = cst && getLinePos(this.offset, cst); + + if (start) { + var end = { + line: start.line, + col: start.col + 1 + }; + this.linePos = { + start: start, + end: end + }; + } + + delete this.offset; + } else { + this.range = this.source.range; + this.linePos = this.source.rangeAsLinePos; + } + + if (this.linePos) { + var _this$linePos$start = this.linePos.start, + line = _this$linePos$start.line, + col = _this$linePos$start.col; + this.message += " at line ".concat(line, ", column ").concat(col); + var ctx = cst && getPrettyContext(this.linePos, cst); + if (ctx) this.message += ":\n\n".concat(ctx, "\n"); + } + + delete this.source; + } + }]); + + return YAMLError; +}( /*#__PURE__*/_wrapNativeSuper(Error)); +var YAMLReferenceError = /*#__PURE__*/function (_YAMLError) { + _inherits(YAMLReferenceError, _YAMLError); + + var _super2 = _createSuper(YAMLReferenceError); + + function YAMLReferenceError(source, message) { + _classCallCheck(this, YAMLReferenceError); + + return _super2.call(this, 'YAMLReferenceError', source, message); + } + + return YAMLReferenceError; +}(YAMLError); +var YAMLSemanticError = /*#__PURE__*/function (_YAMLError2) { + _inherits(YAMLSemanticError, _YAMLError2); + + var _super3 = _createSuper(YAMLSemanticError); + + function YAMLSemanticError(source, message) { + _classCallCheck(this, YAMLSemanticError); + + return _super3.call(this, 'YAMLSemanticError', source, message); + } + + return YAMLSemanticError; +}(YAMLError); +var YAMLSyntaxError = /*#__PURE__*/function (_YAMLError3) { + _inherits(YAMLSyntaxError, _YAMLError3); + + var _super4 = _createSuper(YAMLSyntaxError); + + function YAMLSyntaxError(source, message) { + _classCallCheck(this, YAMLSyntaxError); + + return _super4.call(this, 'YAMLSyntaxError', source, message); + } + + return YAMLSyntaxError; +}(YAMLError); +var YAMLWarning = /*#__PURE__*/function (_YAMLError4) { + _inherits(YAMLWarning, _YAMLError4); + + var _super5 = 
_createSuper(YAMLWarning); + + function YAMLWarning(source, message) { + _classCallCheck(this, YAMLWarning); + + return _super5.call(this, 'YAMLWarning', source, message); + } + + return YAMLWarning; +}(YAMLError); + +var PlainValue = /*#__PURE__*/function (_Node) { + _inherits(PlainValue, _Node); + + var _super = _createSuper(PlainValue); + + function PlainValue() { + _classCallCheck(this, PlainValue); + + return _super.apply(this, arguments); + } + + _createClass(PlainValue, [{ + key: "strValue", + get: function get() { + if (!this.valueRange || !this.context) return null; + var _this$valueRange = this.valueRange, + start = _this$valueRange.start, + end = _this$valueRange.end; + var src = this.context.src; + var ch = src[end - 1]; + + while (start < end && (ch === '\n' || ch === '\t' || ch === ' ')) { + ch = src[--end - 1]; + } + + var str = ''; + + for (var i = start; i < end; ++i) { + var _ch = src[i]; + + if (_ch === '\n') { + var _Node$foldNewline = Node.foldNewline(src, i, -1), + fold = _Node$foldNewline.fold, + offset = _Node$foldNewline.offset; + + str += fold; + i = offset; + } else if (_ch === ' ' || _ch === '\t') { + // trim trailing whitespace + var wsStart = i; + var next = src[i + 1]; + + while (i < end && (next === ' ' || next === '\t')) { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : _ch; + } else { + str += _ch; + } + } + + var ch0 = src[start]; + + switch (ch0) { + case '\t': + { + var msg = 'Plain value cannot start with a tab character'; + var errors = [new YAMLSemanticError(this, msg)]; + return { + errors: errors, + str: str + }; + } + + case '@': + case '`': + { + var _msg = "Plain value cannot start with reserved character ".concat(ch0); + + var _errors = [new YAMLSemanticError(this, _msg)]; + return { + errors: _errors, + str: str + }; + } + + default: + return str; + } + } + }, { + key: "parseBlockValue", + value: function parseBlockValue(start) { + var _this$context = this.context, + indent = _this$context.indent, + inFlow = _this$context.inFlow, + src = _this$context.src; + var offset = start; + var valueEnd = start; + + for (var ch = src[offset]; ch === '\n'; ch = src[offset]) { + if (Node.atDocumentBoundary(src, offset + 1)) break; + var end = Node.endOfBlockIndent(src, indent, offset + 1); + if (end === null || src[end] === '#') break; + + if (src[end] === '\n') { + offset = end; + } else { + valueEnd = PlainValue.endOfLine(src, end, inFlow); + offset = valueEnd; + } + } + + if (this.valueRange.isEmpty()) this.valueRange.start = start; + this.valueRange.end = valueEnd; + return valueEnd; + } + /** + * Parses a plain value from the source + * + * Accepted forms are: + * ``` + * #comment + * + * first line + * + * first line #comment + * + * first line + * block + * lines + * + * #comment + * block + * lines + * ``` + * where block lines are empty or have an indent level greater than `indent`. 
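+     * For example, parsing 'first line\n  block\n' with indent 0 extends the
+     * value range across both lines and returns the offset of the final
+     * newline.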
+ * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar, may be `\n` + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var inFlow = context.inFlow, + src = context.src; + var offset = start; + var ch = src[offset]; + + if (ch && ch !== '#' && ch !== '\n') { + offset = PlainValue.endOfLine(src, start, inFlow); + } + + this.valueRange = new Range(start, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + + if (!this.hasComment || this.valueRange.isEmpty()) { + offset = this.parseBlockValue(offset); + } + + return offset; + } + }], [{ + key: "endOfLine", + value: function endOfLine(src, start, inFlow) { + var ch = src[start]; + var offset = start; + + while (ch && ch !== '\n') { + if (inFlow && (ch === '[' || ch === ']' || ch === '{' || ch === '}' || ch === ',')) break; + var next = src[offset + 1]; + if (ch === ':' && (!next || next === '\n' || next === '\t' || next === ' ' || inFlow && next === ',')) break; + if ((ch === ' ' || ch === '\t') && next === '#') break; + offset += 1; + ch = next; + } + + return offset; + } + }]); + + return PlainValue; +}(Node); + +export { Char as C, Node as N, PlainValue as P, Range as R, Type as T, YAMLSyntaxError as Y, _createForOfIteratorHelper as _, _typeof as a, _createClass as b, _classCallCheck as c, defaultTagPrefix as d, _defineProperty as e, YAMLWarning as f, YAMLSemanticError as g, _slicedToArray as h, YAMLError as i, _inherits as j, _createSuper as k, _get as l, _getPrototypeOf as m, defaultTags as n, YAMLReferenceError as o, _assertThisInitialized as p, _toArray as q, _possibleConstructorReturn as r }; diff --git a/node_modules/yaml/browser/dist/Schema-e94716c8.js b/node_modules/yaml/browser/dist/Schema-e94716c8.js new file mode 100644 index 0000000..f18527d --- /dev/null +++ b/node_modules/yaml/browser/dist/Schema-e94716c8.js @@ -0,0 +1,682 @@ +import { _ as _createForOfIteratorHelper, h as _slicedToArray, a as _typeof, b as _createClass, e as _defineProperty, c as _classCallCheck, d as defaultTagPrefix, n as defaultTags } from './PlainValue-b8036b75.js'; +import { d as YAMLMap, g as resolveMap, Y as YAMLSeq, h as resolveSeq, j as resolveString, c as stringifyString, s as strOptions, S as Scalar, n as nullOptions, a as boolOptions, i as intOptions, k as stringifyNumber, N as Node, A as Alias, P as Pair } from './resolveSeq-492ab440.js'; +import { b as binary, o as omap, p as pairs, s as set, i as intTime, f as floatTime, t as timestamp, a as warnOptionDeprecation } from './warnings-df54cb69.js'; + +function createMap(schema, obj, ctx) { + var map = new YAMLMap(schema); + + if (obj instanceof Map) { + var _iterator = _createForOfIteratorHelper(obj), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _slicedToArray(_step.value, 2), + key = _step$value[0], + value = _step$value[1]; + + map.items.push(schema.createPair(key, value, ctx)); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } else if (obj && _typeof(obj) === 'object') { + for (var _i = 0, _Object$keys = Object.keys(obj); _i < _Object$keys.length; _i++) { + var _key = _Object$keys[_i]; + map.items.push(schema.createPair(_key, obj[_key], ctx)); + } + } + + if (typeof schema.sortMapEntries === 'function') { + map.items.sort(schema.sortMapEntries); + } + + return map; +} + +var map = { + createNode: createMap, + default: true, + 
nodeClass: YAMLMap, + tag: 'tag:yaml.org,2002:map', + resolve: resolveMap +}; + +function createSeq(schema, obj, ctx) { + var seq = new YAMLSeq(schema); + + if (obj && obj[Symbol.iterator]) { + var _iterator = _createForOfIteratorHelper(obj), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var it = _step.value; + var v = schema.createNode(it, ctx.wrapScalars, null, ctx); + seq.items.push(v); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } + + return seq; +} + +var seq = { + createNode: createSeq, + default: true, + nodeClass: YAMLSeq, + tag: 'tag:yaml.org,2002:seq', + resolve: resolveSeq +}; + +var string = { + identify: function identify(value) { + return typeof value === 'string'; + }, + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveString, + stringify: function stringify(item, ctx, onComment, onChompKeep) { + ctx = Object.assign({ + actualString: true + }, ctx); + return stringifyString(item, ctx, onComment, onChompKeep); + }, + options: strOptions +}; + +var failsafe = [map, seq, string]; + +/* global BigInt */ + +var intIdentify$2 = function intIdentify(value) { + return typeof value === 'bigint' || Number.isInteger(value); +}; + +var intResolve$1 = function intResolve(src, part, radix) { + return intOptions.asBigInt ? BigInt(src) : parseInt(part, radix); +}; + +function intStringify$1(node, radix, prefix) { + var value = node.value; + if (intIdentify$2(value) && value >= 0) return prefix + value.toString(radix); + return stringifyNumber(node); +} + +var nullObj = { + identify: function identify(value) { + return value == null; + }, + createNode: function createNode(schema, value, ctx) { + return ctx.wrapScalars ? new Scalar(null) : null; + }, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: function resolve() { + return null; + }, + options: nullOptions, + stringify: function stringify() { + return nullOptions.nullStr; + } +}; +var boolObj = { + identify: function identify(value) { + return typeof value === 'boolean'; + }, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, + resolve: function resolve(str) { + return str[0] === 't' || str[0] === 'T'; + }, + options: boolOptions, + stringify: function stringify(_ref) { + var value = _ref.value; + return value ? 
boolOptions.trueStr : boolOptions.falseStr; + } +}; +var octObj = { + identify: function identify(value) { + return intIdentify$2(value) && value >= 0; + }, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^0o([0-7]+)$/, + resolve: function resolve(str, oct) { + return intResolve$1(str, oct, 8); + }, + options: intOptions, + stringify: function stringify(node) { + return intStringify$1(node, 8, '0o'); + } +}; +var intObj = { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9]+$/, + resolve: function resolve(str) { + return intResolve$1(str, str, 10); + }, + options: intOptions, + stringify: stringifyNumber +}; +var hexObj = { + identify: function identify(value) { + return intIdentify$2(value) && value >= 0; + }, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^0x([0-9a-fA-F]+)$/, + resolve: function resolve(str, hex) { + return intResolve$1(str, hex, 16); + }, + options: intOptions, + stringify: function stringify(node) { + return intStringify$1(node, 16, '0x'); + } +}; +var nanObj = { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: function resolve(str, nan) { + return nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY; + }, + stringify: stringifyNumber +}; +var expObj = { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, + resolve: function resolve(str) { + return parseFloat(str); + }, + stringify: function stringify(_ref2) { + var value = _ref2.value; + return Number(value).toExponential(); + } +}; +var floatObj = { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:\.([0-9]+)|[0-9]+\.([0-9]*))$/, + resolve: function resolve(str, frac1, frac2) { + var frac = frac1 || frac2; + var node = new Scalar(parseFloat(str)); + if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length; + return node; + }, + stringify: stringifyNumber +}; +var core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]); + +/* global BigInt */ + +var intIdentify$1 = function intIdentify(value) { + return typeof value === 'bigint' || Number.isInteger(value); +}; + +var stringifyJSON = function stringifyJSON(_ref) { + var value = _ref.value; + return JSON.stringify(value); +}; + +var json = [map, seq, { + identify: function identify(value) { + return typeof value === 'string'; + }, + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveString, + stringify: stringifyJSON +}, { + identify: function identify(value) { + return value == null; + }, + createNode: function createNode(schema, value, ctx) { + return ctx.wrapScalars ? 
new Scalar(null) : null; + }, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^null$/, + resolve: function resolve() { + return null; + }, + stringify: stringifyJSON +}, { + identify: function identify(value) { + return typeof value === 'boolean'; + }, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^true|false$/, + resolve: function resolve(str) { + return str === 'true'; + }, + stringify: stringifyJSON +}, { + identify: intIdentify$1, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^-?(?:0|[1-9][0-9]*)$/, + resolve: function resolve(str) { + return intOptions.asBigInt ? BigInt(str) : parseInt(str, 10); + }, + stringify: function stringify(_ref2) { + var value = _ref2.value; + return intIdentify$1(value) ? value.toString() : JSON.stringify(value); + } +}, { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, + resolve: function resolve(str) { + return parseFloat(str); + }, + stringify: stringifyJSON +}]; + +json.scalarFallback = function (str) { + throw new SyntaxError("Unresolved plain scalar ".concat(JSON.stringify(str))); +}; + +/* global BigInt */ + +var boolStringify = function boolStringify(_ref) { + var value = _ref.value; + return value ? boolOptions.trueStr : boolOptions.falseStr; +}; + +var intIdentify = function intIdentify(value) { + return typeof value === 'bigint' || Number.isInteger(value); +}; + +function intResolve(sign, src, radix) { + var str = src.replace(/_/g, ''); + + if (intOptions.asBigInt) { + switch (radix) { + case 2: + str = "0b".concat(str); + break; + + case 8: + str = "0o".concat(str); + break; + + case 16: + str = "0x".concat(str); + break; + } + + var _n = BigInt(str); + + return sign === '-' ? BigInt(-1) * _n : _n; + } + + var n = parseInt(str, radix); + return sign === '-' ? -1 * n : n; +} + +function intStringify(node, radix, prefix) { + var value = node.value; + + if (intIdentify(value)) { + var str = value.toString(radix); + return value < 0 ? '-' + prefix + str.substr(1) : prefix + str; + } + + return stringifyNumber(node); +} + +var yaml11 = failsafe.concat([{ + identify: function identify(value) { + return value == null; + }, + createNode: function createNode(schema, value, ctx) { + return ctx.wrapScalars ? 
new Scalar(null) : null; + }, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: function resolve() { + return null; + }, + options: nullOptions, + stringify: function stringify() { + return nullOptions.nullStr; + } +}, { + identify: function identify(value) { + return typeof value === 'boolean'; + }, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, + resolve: function resolve() { + return true; + }, + options: boolOptions, + stringify: boolStringify +}, { + identify: function identify(value) { + return typeof value === 'boolean'; + }, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, + resolve: function resolve() { + return false; + }, + options: boolOptions, + stringify: boolStringify +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'BIN', + test: /^([-+]?)0b([0-1_]+)$/, + resolve: function resolve(str, sign, bin) { + return intResolve(sign, bin, 2); + }, + stringify: function stringify(node) { + return intStringify(node, 2, '0b'); + } +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^([-+]?)0([0-7_]+)$/, + resolve: function resolve(str, sign, oct) { + return intResolve(sign, oct, 8); + }, + stringify: function stringify(node) { + return intStringify(node, 8, '0'); + } +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^([-+]?)([0-9][0-9_]*)$/, + resolve: function resolve(str, sign, abs) { + return intResolve(sign, abs, 10); + }, + stringify: stringifyNumber +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^([-+]?)0x([0-9a-fA-F_]+)$/, + resolve: function resolve(str, sign, hex) { + return intResolve(sign, hex, 16); + }, + stringify: function stringify(node) { + return intStringify(node, 16, '0x'); + } +}, { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: function resolve(str, nan) { + return nan ? NaN : str[0] === '-' ? 
Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY; + }, + stringify: stringifyNumber +}, { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?([0-9][0-9_]*)?(\.[0-9_]*)?[eE][-+]?[0-9]+$/, + resolve: function resolve(str) { + return parseFloat(str.replace(/_/g, '')); + }, + stringify: function stringify(_ref2) { + var value = _ref2.value; + return Number(value).toExponential(); + } +}, { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:[0-9][0-9_]*)?\.([0-9_]*)$/, + resolve: function resolve(str, frac) { + var node = new Scalar(parseFloat(str.replace(/_/g, ''))); + + if (frac) { + var f = frac.replace(/_/g, ''); + if (f[f.length - 1] === '0') node.minFractionDigits = f.length; + } + + return node; + }, + stringify: stringifyNumber +}], binary, omap, pairs, set, intTime, floatTime, timestamp); + +var schemas = { + core: core, + failsafe: failsafe, + json: json, + yaml11: yaml11 +}; +var tags = { + binary: binary, + bool: boolObj, + float: floatObj, + floatExp: expObj, + floatNaN: nanObj, + floatTime: floatTime, + int: intObj, + intHex: hexObj, + intOct: octObj, + intTime: intTime, + map: map, + null: nullObj, + omap: omap, + pairs: pairs, + seq: seq, + set: set, + timestamp: timestamp +}; + +function findTagObject(value, tagName, tags) { + if (tagName) { + var match = tags.filter(function (t) { + return t.tag === tagName; + }); + var tagObj = match.find(function (t) { + return !t.format; + }) || match[0]; + if (!tagObj) throw new Error("Tag ".concat(tagName, " not found")); + return tagObj; + } // TODO: deprecate/remove class check + + + return tags.find(function (t) { + return (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format; + }); +} + +function createNode(value, tagName, ctx) { + if (value instanceof Node) return value; + var defaultPrefix = ctx.defaultPrefix, + onTagObj = ctx.onTagObj, + prevObjects = ctx.prevObjects, + schema = ctx.schema, + wrapScalars = ctx.wrapScalars; + if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2); + var tagObj = findTagObject(value, tagName, schema.tags); + + if (!tagObj) { + if (typeof value.toJSON === 'function') value = value.toJSON(); + if (!value || _typeof(value) !== 'object') return wrapScalars ? new Scalar(value) : value; + tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map; + } + + if (onTagObj) { + onTagObj(tagObj); + delete ctx.onTagObj; + } // Detect duplicate references to the same object & use Alias nodes for all + // after first. The `obj` wrapper allows for circular references to resolve. + + + var obj = { + value: undefined, + node: undefined + }; + + if (value && _typeof(value) === 'object' && prevObjects) { + var prev = prevObjects.get(value); + + if (prev) { + var alias = new Alias(prev); // leaves source dirty; must be cleaned by caller + + ctx.aliasNodes.push(alias); // defined along with prevObjects + + return alias; + } + + obj.value = value; + prevObjects.set(value, obj); + } + + obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? 
new Scalar(value) : value; + if (tagName && obj.node instanceof Node) obj.node.tag = tagName; + return obj.node; +} + +function getSchemaTags(schemas, knownTags, customTags, schemaId) { + var tags = schemas[schemaId.replace(/\W/g, '')]; // 'yaml-1.1' -> 'yaml11' + + if (!tags) { + var keys = Object.keys(schemas).map(function (key) { + return JSON.stringify(key); + }).join(', '); + throw new Error("Unknown schema \"".concat(schemaId, "\"; use one of ").concat(keys)); + } + + if (Array.isArray(customTags)) { + var _iterator = _createForOfIteratorHelper(customTags), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var tag = _step.value; + tags = tags.concat(tag); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } else if (typeof customTags === 'function') { + tags = customTags(tags.slice()); + } + + for (var i = 0; i < tags.length; ++i) { + var _tag = tags[i]; + + if (typeof _tag === 'string') { + var tagObj = knownTags[_tag]; + + if (!tagObj) { + var _keys = Object.keys(knownTags).map(function (key) { + return JSON.stringify(key); + }).join(', '); + + throw new Error("Unknown custom tag \"".concat(_tag, "\"; use one of ").concat(_keys)); + } + + tags[i] = tagObj; + } + } + + return tags; +} + +var sortMapEntriesByKey = function sortMapEntriesByKey(a, b) { + return a.key < b.key ? -1 : a.key > b.key ? 1 : 0; +}; + +var Schema = /*#__PURE__*/function () { + // TODO: remove in v2 + // TODO: remove in v2 + function Schema(_ref) { + var customTags = _ref.customTags, + merge = _ref.merge, + schema = _ref.schema, + sortMapEntries = _ref.sortMapEntries, + deprecatedCustomTags = _ref.tags; + + _classCallCheck(this, Schema); + + this.merge = !!merge; + this.name = schema; + this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null; + if (!customTags && deprecatedCustomTags) warnOptionDeprecation('tags', 'customTags'); + this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema); + } + + _createClass(Schema, [{ + key: "createNode", + value: function createNode$1(value, wrapScalars, tagName, ctx) { + var baseCtx = { + defaultPrefix: Schema.defaultPrefix, + schema: this, + wrapScalars: wrapScalars + }; + var createCtx = ctx ? 
Object.assign(ctx, baseCtx) : baseCtx; + return createNode(value, tagName, createCtx); + } + }, { + key: "createPair", + value: function createPair(key, value, ctx) { + if (!ctx) ctx = { + wrapScalars: true + }; + var k = this.createNode(key, ctx.wrapScalars, null, ctx); + var v = this.createNode(value, ctx.wrapScalars, null, ctx); + return new Pair(k, v); + } + }]); + + return Schema; +}(); + +_defineProperty(Schema, "defaultPrefix", defaultTagPrefix); + +_defineProperty(Schema, "defaultTags", defaultTags); + +export { Schema as S }; diff --git a/node_modules/yaml/browser/dist/index.js b/node_modules/yaml/browser/dist/index.js new file mode 100644 index 0000000..e67898b --- /dev/null +++ b/node_modules/yaml/browser/dist/index.js @@ -0,0 +1,1002 @@ +import { d as defaultTagPrefix, _ as _createForOfIteratorHelper, a as _typeof, b as _createClass, c as _classCallCheck, e as _defineProperty, Y as YAMLSyntaxError, T as Type, f as YAMLWarning, g as YAMLSemanticError, h as _slicedToArray, i as YAMLError, j as _inherits, k as _createSuper } from './PlainValue-b8036b75.js'; +import { parse as parse$1 } from './parse-cst.js'; +import { b as binaryOptions, a as boolOptions, i as intOptions, n as nullOptions, s as strOptions, N as Node, P as Pair, S as Scalar, c as stringifyString, A as Alias, Y as YAMLSeq, d as YAMLMap, M as Merge, C as Collection, r as resolveNode, e as isEmptyPath, t as toJSON, f as addComment } from './resolveSeq-492ab440.js'; +import { S as Schema } from './Schema-e94716c8.js'; +import { w as warn } from './warnings-df54cb69.js'; + +var defaultOptions = { + anchorPrefix: 'a', + customTags: null, + indent: 2, + indentSeq: true, + keepCstNodes: false, + keepNodeTypes: true, + keepBlobsInJSON: true, + mapAsMap: false, + maxAliasCount: 100, + prettyErrors: false, + // TODO Set true in v2 + simpleKeys: false, + version: '1.2' +}; +var scalarOptions = { + get binary() { + return binaryOptions; + }, + + set binary(opt) { + Object.assign(binaryOptions, opt); + }, + + get bool() { + return boolOptions; + }, + + set bool(opt) { + Object.assign(boolOptions, opt); + }, + + get int() { + return intOptions; + }, + + set int(opt) { + Object.assign(intOptions, opt); + }, + + get null() { + return nullOptions; + }, + + set null(opt) { + Object.assign(nullOptions, opt); + }, + + get str() { + return strOptions; + }, + + set str(opt) { + Object.assign(strOptions, opt); + } + +}; +var documentOptions = { + '1.0': { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: defaultTagPrefix + }, { + handle: '!!', + prefix: 'tag:private.yaml.org,2002:' + }] + }, + 1.1: { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: '!' + }, { + handle: '!!', + prefix: defaultTagPrefix + }] + }, + 1.2: { + schema: 'core', + merge: false, + tagPrefixes: [{ + handle: '!', + prefix: '!' + }, { + handle: '!!', + prefix: defaultTagPrefix + }] + } +}; + +function stringifyTag(doc, tag) { + if ((doc.version || doc.options.version) === '1.0') { + var priv = tag.match(/^tag:private\.yaml\.org,2002:([^:/]+)$/); + if (priv) return '!' + priv[1]; + var vocab = tag.match(/^tag:([a-zA-Z0-9-]+)\.yaml\.org,2002:(.*)/); + return vocab ? 
"!".concat(vocab[1], "/").concat(vocab[2]) : "!".concat(tag.replace(/^tag:/, '')); + } + + var p = doc.tagPrefixes.find(function (p) { + return tag.indexOf(p.prefix) === 0; + }); + + if (!p) { + var dtp = doc.getDefaults().tagPrefixes; + p = dtp && dtp.find(function (p) { + return tag.indexOf(p.prefix) === 0; + }); + } + + if (!p) return tag[0] === '!' ? tag : "!<".concat(tag, ">"); + var suffix = tag.substr(p.prefix.length).replace(/[!,[\]{}]/g, function (ch) { + return { + '!': '%21', + ',': '%2C', + '[': '%5B', + ']': '%5D', + '{': '%7B', + '}': '%7D' + }[ch]; + }); + return p.handle + suffix; +} + +function getTagObject(tags, item) { + if (item instanceof Alias) return Alias; + + if (item.tag) { + var match = tags.filter(function (t) { + return t.tag === item.tag; + }); + if (match.length > 0) return match.find(function (t) { + return t.format === item.format; + }) || match[0]; + } + + var tagObj, obj; + + if (item instanceof Scalar) { + obj = item.value; // TODO: deprecate/remove class check + + var _match = tags.filter(function (t) { + return t.identify && t.identify(obj) || t.class && obj instanceof t.class; + }); + + tagObj = _match.find(function (t) { + return t.format === item.format; + }) || _match.find(function (t) { + return !t.format; + }); + } else { + obj = item; + tagObj = tags.find(function (t) { + return t.nodeClass && obj instanceof t.nodeClass; + }); + } + + if (!tagObj) { + var name = obj && obj.constructor ? obj.constructor.name : _typeof(obj); + throw new Error("Tag not resolved for ".concat(name, " value")); + } + + return tagObj; +} // needs to be called before value stringifier to allow for circular anchor refs + + +function stringifyProps(node, tagObj, _ref) { + var anchors = _ref.anchors, + doc = _ref.doc; + var props = []; + var anchor = doc.anchors.getName(node); + + if (anchor) { + anchors[anchor] = node; + props.push("&".concat(anchor)); + } + + if (node.tag) { + props.push(stringifyTag(doc, node.tag)); + } else if (!tagObj.default) { + props.push(stringifyTag(doc, tagObj.tag)); + } + + return props.join(' '); +} + +function stringify$1(item, ctx, onComment, onChompKeep) { + var _ctx$doc = ctx.doc, + anchors = _ctx$doc.anchors, + schema = _ctx$doc.schema; + var tagObj; + + if (!(item instanceof Node)) { + var createCtx = { + aliasNodes: [], + onTagObj: function onTagObj(o) { + return tagObj = o; + }, + prevObjects: new Map() + }; + item = schema.createNode(item, true, null, createCtx); + + var _iterator = _createForOfIteratorHelper(createCtx.aliasNodes), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var alias = _step.value; + alias.source = alias.source.node; + var name = anchors.getName(alias.source); + + if (!name) { + name = anchors.newName(); + anchors.map[name] = alias.source; + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + } + + if (item instanceof Pair) return item.toString(ctx, onComment, onChompKeep); + if (!tagObj) tagObj = getTagObject(schema.tags, item); + var props = stringifyProps(item, tagObj, ctx); + if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1; + var str = typeof tagObj.stringify === 'function' ? tagObj.stringify(item, ctx, onComment, onChompKeep) : item instanceof Scalar ? stringifyString(item, ctx, onComment, onChompKeep) : item.toString(ctx, onComment, onChompKeep); + if (!props) return str; + return item instanceof Scalar || str[0] === '{' || str[0] === '[' ? 
"".concat(props, " ").concat(str) : "".concat(props, "\n").concat(ctx.indent).concat(str); +} + +var Anchors = /*#__PURE__*/function () { + function Anchors(prefix) { + _classCallCheck(this, Anchors); + + _defineProperty(this, "map", Object.create(null)); + + this.prefix = prefix; + } + + _createClass(Anchors, [{ + key: "createAlias", + value: function createAlias(node, name) { + this.setAnchor(node, name); + return new Alias(node); + } + }, { + key: "createMergePair", + value: function createMergePair() { + var _this = this; + + var merge = new Merge(); + + for (var _len = arguments.length, sources = new Array(_len), _key = 0; _key < _len; _key++) { + sources[_key] = arguments[_key]; + } + + merge.value.items = sources.map(function (s) { + if (s instanceof Alias) { + if (s.source instanceof YAMLMap) return s; + } else if (s instanceof YAMLMap) { + return _this.createAlias(s); + } + + throw new Error('Merge sources must be Map nodes or their Aliases'); + }); + return merge; + } + }, { + key: "getName", + value: function getName(node) { + var map = this.map; + return Object.keys(map).find(function (a) { + return map[a] === node; + }); + } + }, { + key: "getNames", + value: function getNames() { + return Object.keys(this.map); + } + }, { + key: "getNode", + value: function getNode(name) { + return this.map[name]; + } + }, { + key: "newName", + value: function newName(prefix) { + if (!prefix) prefix = this.prefix; + var names = Object.keys(this.map); + + for (var i = 1; true; ++i) { + var name = "".concat(prefix).concat(i); + if (!names.includes(name)) return name; + } + } // During parsing, map & aliases contain CST nodes + + }, { + key: "resolveNodes", + value: function resolveNodes() { + var map = this.map, + _cstAliases = this._cstAliases; + Object.keys(map).forEach(function (a) { + map[a] = map[a].resolved; + }); + + _cstAliases.forEach(function (a) { + a.source = a.source.resolved; + }); + + delete this._cstAliases; + } + }, { + key: "setAnchor", + value: function setAnchor(node, name) { + if (node != null && !Anchors.validAnchorNode(node)) { + throw new Error('Anchors may only be set for Scalar, Seq and Map nodes'); + } + + if (name && /[\x00-\x19\s,[\]{}]/.test(name)) { + throw new Error('Anchor names must not contain whitespace or control characters'); + } + + var map = this.map; + var prev = node && Object.keys(map).find(function (a) { + return map[a] === node; + }); + + if (prev) { + if (!name) { + return prev; + } else if (prev !== name) { + delete map[prev]; + map[name] = node; + } + } else { + if (!name) { + if (!node) return null; + name = this.newName(); + } + + map[name] = node; + } + + return name; + } + }], [{ + key: "validAnchorNode", + value: function validAnchorNode(node) { + return node instanceof Scalar || node instanceof YAMLSeq || node instanceof YAMLMap; + } + }]); + + return Anchors; +}(); + +var visit = function visit(node, tags) { + if (node && _typeof(node) === 'object') { + var tag = node.tag; + + if (node instanceof Collection) { + if (tag) tags[tag] = true; + node.items.forEach(function (n) { + return visit(n, tags); + }); + } else if (node instanceof Pair) { + visit(node.key, tags); + visit(node.value, tags); + } else if (node instanceof Scalar) { + if (tag) tags[tag] = true; + } + } + + return tags; +}; + +var listTagNames = function listTagNames(node) { + return Object.keys(visit(node, {})); +}; + +function parseContents(doc, contents) { + var comments = { + before: [], + after: [] + }; + var body = undefined; + var spaceBefore = false; + + var _iterator = 
_createForOfIteratorHelper(contents), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var node = _step.value; + + if (node.valueRange) { + if (body !== undefined) { + var msg = 'Document contains trailing content not separated by a ... or --- line'; + doc.errors.push(new YAMLSyntaxError(node, msg)); + break; + } + + var res = resolveNode(doc, node); + + if (spaceBefore) { + res.spaceBefore = true; + spaceBefore = false; + } + + body = res; + } else if (node.comment !== null) { + var cc = body === undefined ? comments.before : comments.after; + cc.push(node.comment); + } else if (node.type === Type.BLANK_LINE) { + spaceBefore = true; + + if (body === undefined && comments.before.length > 0 && !doc.commentBefore) { + // space-separated comments at start are parsed as document comments + doc.commentBefore = comments.before.join('\n'); + comments.before = []; + } + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + doc.contents = body || null; + + if (!body) { + doc.comment = comments.before.concat(comments.after).join('\n') || null; + } else { + var cb = comments.before.join('\n'); + + if (cb) { + var cbNode = body instanceof Collection && body.items[0] ? body.items[0] : body; + cbNode.commentBefore = cbNode.commentBefore ? "".concat(cb, "\n").concat(cbNode.commentBefore) : cb; + } + + doc.comment = comments.after.join('\n') || null; + } +} + +function resolveTagDirective(_ref, directive) { + var tagPrefixes = _ref.tagPrefixes; + + var _directive$parameters = _slicedToArray(directive.parameters, 2), + handle = _directive$parameters[0], + prefix = _directive$parameters[1]; + + if (!handle || !prefix) { + var msg = 'Insufficient parameters given for %TAG directive'; + throw new YAMLSemanticError(directive, msg); + } + + if (tagPrefixes.some(function (p) { + return p.handle === handle; + })) { + var _msg = 'The %TAG directive must only be given at most once per handle in the same document.'; + throw new YAMLSemanticError(directive, _msg); + } + + return { + handle: handle, + prefix: prefix + }; +} + +function resolveYamlDirective(doc, directive) { + var _directive$parameters2 = _slicedToArray(directive.parameters, 1), + version = _directive$parameters2[0]; + + if (directive.name === 'YAML:1.0') version = '1.0'; + + if (!version) { + var msg = 'Insufficient parameters given for %YAML directive'; + throw new YAMLSemanticError(directive, msg); + } + + if (!documentOptions[version]) { + var v0 = doc.version || doc.options.version; + + var _msg2 = "Document will be parsed as YAML ".concat(v0, " rather than YAML ").concat(version); + + doc.warnings.push(new YAMLWarning(directive, _msg2)); + } + + return version; +} + +function parseDirectives(doc, directives, prevDoc) { + var directiveComments = []; + var hasDirectives = false; + + var _iterator = _createForOfIteratorHelper(directives), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var directive = _step.value; + var comment = directive.comment, + name = directive.name; + + switch (name) { + case 'TAG': + try { + doc.tagPrefixes.push(resolveTagDirective(doc, directive)); + } catch (error) { + doc.errors.push(error); + } + + hasDirectives = true; + break; + + case 'YAML': + case 'YAML:1.0': + if (doc.version) { + var msg = 'The %YAML directive must only be given at most once per document.'; + doc.errors.push(new YAMLSemanticError(directive, msg)); + } + + try { + doc.version = resolveYamlDirective(doc, directive); + } catch (error) { + doc.errors.push(error); + } 
+ + hasDirectives = true; + break; + + default: + if (name) { + var _msg3 = "YAML only supports %TAG and %YAML directives, and not %".concat(name); + + doc.warnings.push(new YAMLWarning(directive, _msg3)); + } + + } + + if (comment) directiveComments.push(comment); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + if (prevDoc && !hasDirectives && '1.1' === (doc.version || prevDoc.version || doc.options.version)) { + var copyTagPrefix = function copyTagPrefix(_ref2) { + var handle = _ref2.handle, + prefix = _ref2.prefix; + return { + handle: handle, + prefix: prefix + }; + }; + + doc.tagPrefixes = prevDoc.tagPrefixes.map(copyTagPrefix); + doc.version = prevDoc.version; + } + + doc.commentBefore = directiveComments.join('\n') || null; +} + +function assertCollection(contents) { + if (contents instanceof Collection) return true; + throw new Error('Expected a YAML collection as document contents'); +} + +var Document$1 = /*#__PURE__*/function () { + function Document(options) { + _classCallCheck(this, Document); + + this.anchors = new Anchors(options.anchorPrefix); + this.commentBefore = null; + this.comment = null; + this.contents = null; + this.directivesEndMarker = null; + this.errors = []; + this.options = options; + this.schema = null; + this.tagPrefixes = []; + this.version = null; + this.warnings = []; + } + + _createClass(Document, [{ + key: "add", + value: function add(value) { + assertCollection(this.contents); + return this.contents.add(value); + } + }, { + key: "addIn", + value: function addIn(path, value) { + assertCollection(this.contents); + this.contents.addIn(path, value); + } + }, { + key: "delete", + value: function _delete(key) { + assertCollection(this.contents); + return this.contents.delete(key); + } + }, { + key: "deleteIn", + value: function deleteIn(path) { + if (isEmptyPath(path)) { + if (this.contents == null) return false; + this.contents = null; + return true; + } + + assertCollection(this.contents); + return this.contents.deleteIn(path); + } + }, { + key: "getDefaults", + value: function getDefaults() { + return Document.defaults[this.version] || Document.defaults[this.options.version] || {}; + } + }, { + key: "get", + value: function get(key, keepScalar) { + return this.contents instanceof Collection ? this.contents.get(key, keepScalar) : undefined; + } + }, { + key: "getIn", + value: function getIn(path, keepScalar) { + if (isEmptyPath(path)) return !keepScalar && this.contents instanceof Scalar ? this.contents.value : this.contents; + return this.contents instanceof Collection ? this.contents.getIn(path, keepScalar) : undefined; + } + }, { + key: "has", + value: function has(key) { + return this.contents instanceof Collection ? this.contents.has(key) : false; + } + }, { + key: "hasIn", + value: function hasIn(path) { + if (isEmptyPath(path)) return this.contents !== undefined; + return this.contents instanceof Collection ? 
this.contents.hasIn(path) : false; + } + }, { + key: "set", + value: function set(key, value) { + assertCollection(this.contents); + this.contents.set(key, value); + } + }, { + key: "setIn", + value: function setIn(path, value) { + if (isEmptyPath(path)) this.contents = value;else { + assertCollection(this.contents); + this.contents.setIn(path, value); + } + } + }, { + key: "setSchema", + value: function setSchema(id, customTags) { + if (!id && !customTags && this.schema) return; + if (typeof id === 'number') id = id.toFixed(1); + + if (id === '1.0' || id === '1.1' || id === '1.2') { + if (this.version) this.version = id;else this.options.version = id; + delete this.options.schema; + } else if (id && typeof id === 'string') { + this.options.schema = id; + } + + if (Array.isArray(customTags)) this.options.customTags = customTags; + var opt = Object.assign({}, this.getDefaults(), this.options); + this.schema = new Schema(opt); + } + }, { + key: "parse", + value: function parse(node, prevDoc) { + if (this.options.keepCstNodes) this.cstNode = node; + if (this.options.keepNodeTypes) this.type = 'DOCUMENT'; + var _node$directives = node.directives, + directives = _node$directives === void 0 ? [] : _node$directives, + _node$contents = node.contents, + contents = _node$contents === void 0 ? [] : _node$contents, + directivesEndMarker = node.directivesEndMarker, + error = node.error, + valueRange = node.valueRange; + + if (error) { + if (!error.source) error.source = this; + this.errors.push(error); + } + + parseDirectives(this, directives, prevDoc); + if (directivesEndMarker) this.directivesEndMarker = true; + this.range = valueRange ? [valueRange.start, valueRange.end] : null; + this.setSchema(); + this.anchors._cstAliases = []; + parseContents(this, contents); + this.anchors.resolveNodes(); + + if (this.options.prettyErrors) { + var _iterator = _createForOfIteratorHelper(this.errors), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _error = _step.value; + if (_error instanceof YAMLError) _error.makePretty(); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + var _iterator2 = _createForOfIteratorHelper(this.warnings), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var warn = _step2.value; + if (warn instanceof YAMLError) warn.makePretty(); + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + } + + return this; + } + }, { + key: "listNonDefaultTags", + value: function listNonDefaultTags() { + return listTagNames(this.contents).filter(function (t) { + return t.indexOf(Schema.defaultPrefix) !== 0; + }); + } + }, { + key: "setTagPrefix", + value: function setTagPrefix(handle, prefix) { + if (handle[0] !== '!' 
|| handle[handle.length - 1] !== '!') throw new Error('Handle must start and end with !'); + + if (prefix) { + var prev = this.tagPrefixes.find(function (p) { + return p.handle === handle; + }); + if (prev) prev.prefix = prefix;else this.tagPrefixes.push({ + handle: handle, + prefix: prefix + }); + } else { + this.tagPrefixes = this.tagPrefixes.filter(function (p) { + return p.handle !== handle; + }); + } + } + }, { + key: "toJSON", + value: function toJSON$1(arg, onAnchor) { + var _this = this; + + var _this$options = this.options, + keepBlobsInJSON = _this$options.keepBlobsInJSON, + mapAsMap = _this$options.mapAsMap, + maxAliasCount = _this$options.maxAliasCount; + var keep = keepBlobsInJSON && (typeof arg !== 'string' || !(this.contents instanceof Scalar)); + var ctx = { + doc: this, + indentStep: ' ', + keep: keep, + mapAsMap: keep && !!mapAsMap, + maxAliasCount: maxAliasCount, + stringify: stringify$1 // Requiring directly in Pair would create circular dependencies + + }; + var anchorNames = Object.keys(this.anchors.map); + if (anchorNames.length > 0) ctx.anchors = new Map(anchorNames.map(function (name) { + return [_this.anchors.map[name], { + alias: [], + aliasCount: 0, + count: 1 + }]; + })); + + var res = toJSON(this.contents, arg, ctx); + + if (typeof onAnchor === 'function' && ctx.anchors) { + var _iterator3 = _createForOfIteratorHelper(ctx.anchors.values()), + _step3; + + try { + for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) { + var _step3$value = _step3.value, + count = _step3$value.count, + _res = _step3$value.res; + onAnchor(_res, count); + } + } catch (err) { + _iterator3.e(err); + } finally { + _iterator3.f(); + } + } + + return res; + } + }, { + key: "toString", + value: function toString() { + if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified'); + var indentSize = this.options.indent; + + if (!Number.isInteger(indentSize) || indentSize <= 0) { + var s = JSON.stringify(indentSize); + throw new Error("\"indent\" option must be a positive integer, not ".concat(s)); + } + + this.setSchema(); + var lines = []; + var hasDirectives = false; + + if (this.version) { + var vd = '%YAML 1.2'; + + if (this.schema.name === 'yaml-1.1') { + if (this.version === '1.0') vd = '%YAML:1.0';else if (this.version === '1.1') vd = '%YAML 1.1'; + } + + lines.push(vd); + hasDirectives = true; + } + + var tagNames = this.listNonDefaultTags(); + this.tagPrefixes.forEach(function (_ref) { + var handle = _ref.handle, + prefix = _ref.prefix; + + if (tagNames.some(function (t) { + return t.indexOf(prefix) === 0; + })) { + lines.push("%TAG ".concat(handle, " ").concat(prefix)); + hasDirectives = true; + } + }); + if (hasDirectives || this.directivesEndMarker) lines.push('---'); + + if (this.commentBefore) { + if (hasDirectives || !this.directivesEndMarker) lines.unshift(''); + lines.unshift(this.commentBefore.replace(/^/gm, '#')); + } + + var ctx = { + anchors: Object.create(null), + doc: this, + indent: '', + indentStep: ' '.repeat(indentSize), + stringify: stringify$1 // Requiring directly in nodes would create circular dependencies + + }; + var chompKeep = false; + var contentComment = null; + + if (this.contents) { + if (this.contents instanceof Node) { + if (this.contents.spaceBefore && (hasDirectives || this.directivesEndMarker)) lines.push(''); + if (this.contents.commentBefore) lines.push(this.contents.commentBefore.replace(/^/gm, '#')); // top-level block scalars need to be indented if followed by a comment + + ctx.forceBlockIndent = !!this.comment; 
+ contentComment = this.contents.comment; + } + + var onChompKeep = contentComment ? null : function () { + return chompKeep = true; + }; + var body = stringify$1(this.contents, ctx, function () { + return contentComment = null; + }, onChompKeep); + lines.push(addComment(body, '', contentComment)); + } else if (this.contents !== undefined) { + lines.push(stringify$1(this.contents, ctx)); + } + + if (this.comment) { + if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push(''); + lines.push(this.comment.replace(/^/gm, '#')); + } + + return lines.join('\n') + '\n'; + } + }]); + + return Document; +}(); + +_defineProperty(Document$1, "defaults", documentOptions); + +function createNode(value) { + var wrapScalars = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true; + var tag = arguments.length > 2 ? arguments[2] : undefined; + + if (tag === undefined && typeof wrapScalars === 'string') { + tag = wrapScalars; + wrapScalars = true; + } + + var options = Object.assign({}, Document$1.defaults[defaultOptions.version], defaultOptions); + var schema = new Schema(options); + return schema.createNode(value, wrapScalars, tag); +} + +var Document = /*#__PURE__*/function (_YAMLDocument) { + _inherits(Document, _YAMLDocument); + + var _super = _createSuper(Document); + + function Document(options) { + _classCallCheck(this, Document); + + return _super.call(this, Object.assign({}, defaultOptions, options)); + } + + return Document; +}(Document$1); + +function parseAllDocuments(src, options) { + var stream = []; + var prev; + + var _iterator = _createForOfIteratorHelper(parse$1(src)), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var cstDoc = _step.value; + var doc = new Document(options); + doc.parse(cstDoc, prev); + stream.push(doc); + prev = doc; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return stream; +} + +function parseDocument(src, options) { + var cst = parse$1(src); + var doc = new Document(options).parse(cst[0]); + + if (cst.length > 1) { + var errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()'; + doc.errors.unshift(new YAMLSemanticError(cst[1], errMsg)); + } + + return doc; +} + +function parse(src, options) { + var doc = parseDocument(src, options); + doc.warnings.forEach(function (warning) { + return warn(warning); + }); + if (doc.errors.length > 0) throw doc.errors[0]; + return doc.toJSON(); +} + +function stringify(value, options) { + var doc = new Document(options); + doc.contents = value; + return String(doc); +} + +var YAML = { + createNode: createNode, + defaultOptions: defaultOptions, + Document: Document, + parse: parse, + parseAllDocuments: parseAllDocuments, + parseCST: parse$1, + parseDocument: parseDocument, + scalarOptions: scalarOptions, + stringify: stringify +}; + +export { YAML }; diff --git a/node_modules/yaml/browser/dist/legacy-exports.js b/node_modules/yaml/browser/dist/legacy-exports.js new file mode 100644 index 0000000..4400ed6 --- /dev/null +++ b/node_modules/yaml/browser/dist/legacy-exports.js @@ -0,0 +1,3 @@ +export { b as binary, f as floatTime, i as intTime, o as omap, p as pairs, s as set, t as timestamp, c as warnFileDeprecation } from './warnings-df54cb69.js'; +import './PlainValue-b8036b75.js'; +import './resolveSeq-492ab440.js'; diff --git a/node_modules/yaml/browser/dist/package.json b/node_modules/yaml/browser/dist/package.json new file mode 100644 index 0000000..5ffd980 --- /dev/null +++ 
b/node_modules/yaml/browser/dist/package.json @@ -0,0 +1 @@ +{ "type": "module" } diff --git a/node_modules/yaml/browser/dist/parse-cst.js b/node_modules/yaml/browser/dist/parse-cst.js new file mode 100644 index 0000000..b68cf3c --- /dev/null +++ b/node_modules/yaml/browser/dist/parse-cst.js @@ -0,0 +1,1904 @@ +import { j as _inherits, k as _createSuper, c as _classCallCheck, T as Type, b as _createClass, R as Range, N as Node, g as YAMLSemanticError, l as _get, m as _getPrototypeOf, Y as YAMLSyntaxError, C as Char, e as _defineProperty, P as PlainValue } from './PlainValue-b8036b75.js'; + +var BlankLine = /*#__PURE__*/function (_Node) { + _inherits(BlankLine, _Node); + + var _super = _createSuper(BlankLine); + + function BlankLine() { + _classCallCheck(this, BlankLine); + + return _super.call(this, Type.BLANK_LINE); + } + /* istanbul ignore next */ + + + _createClass(BlankLine, [{ + key: "includesTrailingLines", + get: function get() { + // This is never called from anywhere, but if it were, + // this is the value it should return. + return true; + } + /** + * Parses a blank line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first \n character + * @returns {number} - Index of the character after this + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + this.range = new Range(start, start + 1); + return start + 1; + } + }]); + + return BlankLine; +}(Node); + +var CollectionItem = /*#__PURE__*/function (_Node) { + _inherits(CollectionItem, _Node); + + var _super = _createSuper(CollectionItem); + + function CollectionItem(type, props) { + var _this; + + _classCallCheck(this, CollectionItem); + + _this = _super.call(this, type, props); + _this.node = null; + return _this; + } + + _createClass(CollectionItem, [{ + key: "includesTrailingLines", + get: function get() { + return !!this.node && this.node.includesTrailingLines; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var parseNode = context.parseNode, + src = context.src; + var atLineStart = context.atLineStart, + lineStart = context.lineStart; + if (!atLineStart && this.type === Type.SEQ_ITEM) this.error = new YAMLSemanticError(this, 'Sequence items must not have preceding content on the same line'); + var indent = atLineStart ? 
start - lineStart : context.indent; + var offset = Node.endOfWhiteSpace(src, start + 1); + var ch = src[offset]; + var inlineComment = ch === '#'; + var comments = []; + var blankLine = null; + + while (ch === '\n' || ch === '#') { + if (ch === '#') { + var _end = Node.endOfLine(src, offset + 1); + + comments.push(new Range(offset, _end)); + offset = _end; + } else { + atLineStart = true; + lineStart = offset + 1; + var wsEnd = Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n' && comments.length === 0) { + blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src: src + }, lineStart); + } + + offset = Node.endOfIndent(src, lineStart); + } + + ch = src[offset]; + } + + if (Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== Type.SEQ_ITEM)) { + this.node = parseNode({ + atLineStart: atLineStart, + inCollection: false, + indent: indent, + lineStart: lineStart, + parent: this + }, offset); + } else if (ch && lineStart > start + 1) { + offset = lineStart - 1; + } + + if (this.node) { + if (blankLine) { + // Only blank lines preceding non-empty nodes are captured. Note that + // this means that collection item range start indices do not always + // increase monotonically. -- eemeli/yaml#126 + var items = context.parent.items || context.parent.contents; + if (items) items.push(blankLine); + } + + if (comments.length) Array.prototype.push.apply(this.props, comments); + offset = this.node.range.end; + } else { + if (inlineComment) { + var c = comments[0]; + this.props.push(c); + offset = c.end; + } else { + offset = Node.endOfLine(src, start + 1); + } + } + + var end = this.node ? this.node.valueRange.end : offset; + this.valueRange = new Range(start, end); + return offset; + } + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + offset = _get(_getPrototypeOf(CollectionItem.prototype), "setOrigRanges", this).call(this, cr, offset); + return this.node ? this.node.setOrigRanges(cr, offset) : offset; + } + }, { + key: "toString", + value: function toString() { + var src = this.context.src, + node = this.node, + range = this.range, + value = this.value; + if (value != null) return value; + var str = node ? 
src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end); + return Node.addStringTerminator(src, range.end, str); + } + }]); + + return CollectionItem; +}(Node); + +var Comment = /*#__PURE__*/function (_Node) { + _inherits(Comment, _Node); + + var _super = _createSuper(Comment); + + function Comment() { + _classCallCheck(this, Comment); + + return _super.call(this, Type.COMMENT); + } + /** + * Parses a comment line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + _createClass(Comment, [{ + key: "parse", + value: function parse(context, start) { + this.context = context; + var offset = this.parseComment(start); + this.range = new Range(start, offset); + return offset; + } + }]); + + return Comment; +}(Node); + +function grabCollectionEndComments(node) { + var cnode = node; + + while (cnode instanceof CollectionItem) { + cnode = cnode.node; + } + + if (!(cnode instanceof Collection)) return null; + var len = cnode.items.length; + var ci = -1; + + for (var i = len - 1; i >= 0; --i) { + var n = cnode.items[i]; + + if (n.type === Type.COMMENT) { + // Keep sufficiently indented comments with preceding node + var _n$context = n.context, + indent = _n$context.indent, + lineStart = _n$context.lineStart; + if (indent > 0 && n.range.start >= lineStart + indent) break; + ci = i; + } else if (n.type === Type.BLANK_LINE) ci = i;else break; + } + + if (ci === -1) return null; + var ca = cnode.items.splice(ci, len - ci); + var prevEnd = ca[0].range.start; + + while (true) { + cnode.range.end = prevEnd; + if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd; + if (cnode === node) break; + cnode = cnode.context.parent; + } + + return ca; +} +var Collection = /*#__PURE__*/function (_Node) { + _inherits(Collection, _Node); + + var _super = _createSuper(Collection); + + function Collection(firstItem) { + var _this; + + _classCallCheck(this, Collection); + + _this = _super.call(this, firstItem.type === Type.SEQ_ITEM ? 
Type.SEQ : Type.MAP); + + for (var i = firstItem.props.length - 1; i >= 0; --i) { + if (firstItem.props[i].start < firstItem.context.lineStart) { + // props on previous line are assumed by the collection + _this.props = firstItem.props.slice(0, i + 1); + firstItem.props = firstItem.props.slice(i + 1); + var itemRange = firstItem.props[0] || firstItem.valueRange; + firstItem.range.start = itemRange.start; + break; + } + } + + _this.items = [firstItem]; + var ec = grabCollectionEndComments(firstItem); + if (ec) Array.prototype.push.apply(_this.items, ec); + return _this; + } + + _createClass(Collection, [{ + key: "includesTrailingLines", + get: function get() { + return this.items.length > 0; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var parseNode = context.parseNode, + src = context.src; // It's easier to recalculate lineStart here rather than tracking down the + // last context from which to read it -- eemeli/yaml#2 + + var lineStart = Node.startOfLine(src, start); + var firstItem = this.items[0]; // First-item context needs to be correct for later comment handling + // -- eemeli/yaml#17 + + firstItem.context.parent = this; + this.valueRange = Range.copy(firstItem.valueRange); + var indent = firstItem.range.start - firstItem.context.lineStart; + var offset = start; + offset = Node.normalizeOffset(src, offset); + var ch = src[offset]; + var atLineStart = Node.endOfWhiteSpace(src, lineStart) === offset; + var prevIncludesTrailingLines = false; + + while (ch) { + while (ch === '\n' || ch === '#') { + if (atLineStart && ch === '\n' && !prevIncludesTrailingLines) { + var blankLine = new BlankLine(); + offset = blankLine.parse({ + src: src + }, offset); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + + this.items.push(blankLine); + offset -= 1; // blankLine.parse() consumes terminal newline + } else if (ch === '#') { + if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) { + return offset; + } + + var comment = new Comment(); + offset = comment.parse({ + indent: indent, + lineStart: lineStart, + src: src + }, offset); + this.items.push(comment); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + } + + lineStart = offset + 1; + offset = Node.endOfIndent(src, lineStart); + + if (Node.atBlank(src, offset)) { + var wsEnd = Node.endOfWhiteSpace(src, offset); + var next = src[wsEnd]; + + if (!next || next === '\n' || next === '#') { + offset = wsEnd; + } + } + + ch = src[offset]; + atLineStart = true; + } + + if (!ch) { + break; + } + + if (offset !== lineStart + indent && (atLineStart || ch !== ':')) { + if (offset < lineStart + indent) { + if (lineStart > start) offset = lineStart; + break; + } else if (!this.error) { + var msg = 'All collection items must start at the same column'; + this.error = new YAMLSyntaxError(this, msg); + } + } + + if (firstItem.type === Type.SEQ_ITEM) { + if (ch !== '-') { + if (lineStart > start) offset = lineStart; + break; + } + } else if (ch === '-' && !this.error) { + // map key may start with -, as long as it's followed by a non-whitespace char + var _next = src[offset + 1]; + + if (!_next || _next === '\n' || _next === '\t' || _next === ' ') { + var _msg = 'A collection cannot be both a mapping and a sequence'; + this.error = new 
YAMLSyntaxError(this, _msg); + } + } + + var node = parseNode({ + atLineStart: atLineStart, + inCollection: true, + indent: indent, + lineStart: lineStart, + parent: this + }, offset); + if (!node) return offset; // at next document start + + this.items.push(node); + this.valueRange.end = node.valueRange.end; + offset = Node.normalizeOffset(src, node.range.end); + ch = src[offset]; + atLineStart = false; + prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if preceding node's range + // has advanced to check the current line's indentation level + // -- eemeli/yaml#10 & eemeli/yaml#38 + + if (ch) { + var ls = offset - 1; + var prev = src[ls]; + + while (prev === ' ' || prev === '\t') { + prev = src[--ls]; + } + + if (prev === '\n') { + lineStart = ls + 1; + atLineStart = true; + } + } + + var ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.items, ec); + } + + return offset; + } + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + offset = _get(_getPrototypeOf(Collection.prototype), "setOrigRanges", this).call(this, cr, offset); + this.items.forEach(function (node) { + offset = node.setOrigRanges(cr, offset); + }); + return offset; + } + }, { + key: "toString", + value: function toString() { + var src = this.context.src, + items = this.items, + range = this.range, + value = this.value; + if (value != null) return value; + var str = src.slice(range.start, items[0].range.start) + String(items[0]); + + for (var i = 1; i < items.length; ++i) { + var item = items[i]; + var _item$context = item.context, + atLineStart = _item$context.atLineStart, + indent = _item$context.indent; + if (atLineStart) for (var _i = 0; _i < indent; ++_i) { + str += ' '; + } + str += String(item); + } + + return Node.addStringTerminator(src, range.end, str); + } + }], [{ + key: "nextContentHasIndent", + value: function nextContentHasIndent(src, offset, indent) { + var lineStart = Node.endOfLine(src, offset) + 1; + offset = Node.endOfWhiteSpace(src, lineStart); + var ch = src[offset]; + if (!ch) return false; + if (offset >= lineStart + indent) return true; + if (ch !== '#' && ch !== '\n') return false; + return Collection.nextContentHasIndent(src, offset, indent); + } + }]); + + return Collection; +}(Node); + +var Directive = /*#__PURE__*/function (_Node) { + _inherits(Directive, _Node); + + var _super = _createSuper(Directive); + + function Directive() { + var _this; + + _classCallCheck(this, Directive); + + _this = _super.call(this, Type.DIRECTIVE); + _this.name = null; + return _this; + } + + _createClass(Directive, [{ + key: "parameters", + get: function get() { + var raw = this.rawValue; + return raw ? 
raw.trim().split(/[ \t]+/) : []; + } + }, { + key: "parseName", + value: function parseName(start) { + var src = this.context.src; + var offset = start; + var ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '\t' && ch !== ' ') { + ch = src[offset += 1]; + } + + this.name = src.slice(start, offset); + return offset; + } + }, { + key: "parseParameters", + value: function parseParameters(start) { + var src = this.context.src; + var offset = start; + var ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '#') { + ch = src[offset += 1]; + } + + this.valueRange = new Range(start, offset); + return offset; + } + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var offset = this.parseName(start + 1); + offset = this.parseParameters(offset); + offset = this.parseComment(offset); + this.range = new Range(start, offset); + return offset; + } + }]); + + return Directive; +}(Node); + +var Document = /*#__PURE__*/function (_Node) { + _inherits(Document, _Node); + + var _super = _createSuper(Document); + + function Document() { + var _this; + + _classCallCheck(this, Document); + + _this = _super.call(this, Type.DOCUMENT); + _this.directives = null; + _this.contents = null; + _this.directivesEndMarker = null; + _this.documentEndMarker = null; + return _this; + } + + _createClass(Document, [{ + key: "parseDirectives", + value: function parseDirectives(start) { + var src = this.context.src; + this.directives = []; + var atLineStart = true; + var hasDirectives = false; + var offset = start; + + while (!Node.atDocumentBoundary(src, offset, Char.DIRECTIVES_END)) { + offset = Document.startCommentOrEndBlankLine(src, offset); + + switch (src[offset]) { + case '\n': + if (atLineStart) { + var blankLine = new BlankLine(); + offset = blankLine.parse({ + src: src + }, offset); + + if (offset < src.length) { + this.directives.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + break; + + case '#': + { + var comment = new Comment(); + offset = comment.parse({ + src: src + }, offset); + this.directives.push(comment); + atLineStart = false; + } + break; + + case '%': + { + var directive = new Directive(); + offset = directive.parse({ + parent: this, + src: src + }, offset); + this.directives.push(directive); + hasDirectives = true; + atLineStart = false; + } + break; + + default: + if (hasDirectives) { + this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + } + + if (src[offset]) { + this.directivesEndMarker = new Range(offset, offset + 3); + return offset + 3; + } + + if (hasDirectives) { + this.error = new YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + }, { + key: "parseContents", + value: function parseContents(start) { + var _this$context = this.context, + parseNode = _this$context.parseNode, + src = _this$context.src; + if (!this.contents) this.contents = []; + var lineStart = start; + + while (src[lineStart - 1] === '-') { + lineStart -= 1; + } + + var offset = Node.endOfWhiteSpace(src, start); + var atLineStart = lineStart === start; + this.valueRange = new Range(offset); + + while (!Node.atDocumentBoundary(src, offset, Char.DOCUMENT_END)) { + switch (src[offset]) { + case '\n': + if (atLineStart) { + var blankLine = new 
BlankLine(); + offset = blankLine.parse({ + src: src + }, offset); + + if (offset < src.length) { + this.contents.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + lineStart = offset; + break; + + case '#': + { + var comment = new Comment(); + offset = comment.parse({ + src: src + }, offset); + this.contents.push(comment); + atLineStart = false; + } + break; + + default: + { + var iEnd = Node.endOfIndent(src, offset); + var context = { + atLineStart: atLineStart, + indent: -1, + inFlow: false, + inCollection: false, + lineStart: lineStart, + parent: this + }; + var node = parseNode(context, iEnd); + if (!node) return this.valueRange.end = iEnd; // at next document start + + this.contents.push(node); + offset = node.range.end; + atLineStart = false; + var ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.contents, ec); + } + } + + offset = Document.startCommentOrEndBlankLine(src, offset); + } + + this.valueRange.end = offset; + + if (src[offset]) { + this.documentEndMarker = new Range(offset, offset + 3); + offset += 3; + + if (src[offset]) { + offset = Node.endOfWhiteSpace(src, offset); + + if (src[offset] === '#') { + var _comment = new Comment(); + + offset = _comment.parse({ + src: src + }, offset); + this.contents.push(_comment); + } + + switch (src[offset]) { + case '\n': + offset += 1; + break; + + case undefined: + break; + + default: + this.error = new YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix'); + } + } + } + + return offset; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + }, { + key: "parse", + value: function parse(context, start) { + context.root = this; + this.context = context; + var src = context.src; + var offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM + + offset = this.parseDirectives(offset); + offset = this.parseContents(offset); + return offset; + } + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + offset = _get(_getPrototypeOf(Document.prototype), "setOrigRanges", this).call(this, cr, offset); + this.directives.forEach(function (node) { + offset = node.setOrigRanges(cr, offset); + }); + if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset); + this.contents.forEach(function (node) { + offset = node.setOrigRanges(cr, offset); + }); + if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset); + return offset; + } + }, { + key: "toString", + value: function toString() { + var contents = this.contents, + directives = this.directives, + value = this.value; + if (value != null) return value; + var str = directives.join(''); + + if (contents.length > 0) { + if (directives.length > 0 || contents[0].type === Type.COMMENT) str += '---\n'; + str += contents.join(''); + } + + if (str[str.length - 1] !== '\n') str += '\n'; + return str; + } + }], [{ + key: "startCommentOrEndBlankLine", + value: function startCommentOrEndBlankLine(src, start) { + var offset = Node.endOfWhiteSpace(src, start); + var ch = src[offset]; + return ch === '#' || ch === '\n' ? 
offset : start; + } + }]); + + return Document; +}(Node); + +var Alias = /*#__PURE__*/function (_Node) { + _inherits(Alias, _Node); + + var _super = _createSuper(Alias); + + function Alias() { + _classCallCheck(this, Alias); + + return _super.apply(this, arguments); + } + + _createClass(Alias, [{ + key: "parse", + value: + /** + * Parses an *alias from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + function parse(context, start) { + this.context = context; + var src = context.src; + var offset = Node.endOfIdentifier(src, start + 1); + this.valueRange = new Range(start + 1, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + }]); + + return Alias; +}(Node); + +var Chomp = { + CLIP: 'CLIP', + KEEP: 'KEEP', + STRIP: 'STRIP' +}; +var BlockValue = /*#__PURE__*/function (_Node) { + _inherits(BlockValue, _Node); + + var _super = _createSuper(BlockValue); + + function BlockValue(type, props) { + var _this; + + _classCallCheck(this, BlockValue); + + _this = _super.call(this, type, props); + _this.blockIndent = null; + _this.chomping = Chomp.CLIP; + _this.header = null; + return _this; + } + + _createClass(BlockValue, [{ + key: "includesTrailingLines", + get: function get() { + return this.chomping === Chomp.KEEP; + } + }, { + key: "strValue", + get: function get() { + if (!this.valueRange || !this.context) return null; + var _this$valueRange = this.valueRange, + start = _this$valueRange.start, + end = _this$valueRange.end; + var _this$context = this.context, + indent = _this$context.indent, + src = _this$context.src; + if (this.valueRange.isEmpty()) return ''; + var lastNewLine = null; + var ch = src[end - 1]; + + while (ch === '\n' || ch === '\t' || ch === ' ') { + end -= 1; + + if (end <= start) { + if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens + } + + if (ch === '\n') lastNewLine = end; + ch = src[end - 1]; + } + + var keepStart = end + 1; + + if (lastNewLine) { + if (this.chomping === Chomp.KEEP) { + keepStart = lastNewLine; + end = this.valueRange.end; + } else { + end = lastNewLine; + } + } + + var bi = indent + this.blockIndent; + var folded = this.type === Type.BLOCK_FOLDED; + var atStart = true; + var str = ''; + var sep = ''; + var prevMoreIndented = false; + + for (var i = start; i < end; ++i) { + for (var j = 0; j < bi; ++j) { + if (src[i] !== ' ') break; + i += 1; + } + + var _ch = src[i]; + + if (_ch === '\n') { + if (sep === '\n') str += '\n';else sep = '\n'; + } else { + var lineEnd = Node.endOfLine(src, i); + var line = src.slice(i, lineEnd); + i = lineEnd; + + if (folded && (_ch === ' ' || _ch === '\t') && i < keepStart) { + if (sep === ' ') sep = '\n';else if (!prevMoreIndented && !atStart && sep === '\n') sep = '\n\n'; + str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '') + + sep = lineEnd < end && src[lineEnd] || ''; + prevMoreIndented = true; + } else { + str += sep + line; + sep = folded && i < keepStart ? ' ' : '\n'; + prevMoreIndented = false; + } + + if (atStart && line !== '') atStart = false; + } + } + + return this.chomping === Chomp.STRIP ? 
str : str + '\n'; + } + }, { + key: "parseBlockHeader", + value: function parseBlockHeader(start) { + var src = this.context.src; + var offset = start + 1; + var bi = ''; + + while (true) { + var ch = src[offset]; + + switch (ch) { + case '-': + this.chomping = Chomp.STRIP; + break; + + case '+': + this.chomping = Chomp.KEEP; + break; + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + bi += ch; + break; + + default: + this.blockIndent = Number(bi) || null; + this.header = new Range(start, offset); + return offset; + } + + offset += 1; + } + } + }, { + key: "parseBlockValue", + value: function parseBlockValue(start) { + var _this$context2 = this.context, + indent = _this$context2.indent, + src = _this$context2.src; + var explicit = !!this.blockIndent; + var offset = start; + var valueEnd = start; + var minBlockIndent = 1; + + for (var ch = src[offset]; ch === '\n'; ch = src[offset]) { + offset += 1; + if (Node.atDocumentBoundary(src, offset)) break; + var end = Node.endOfBlockIndent(src, indent, offset); // should not include tab? + + if (end === null) break; + var _ch2 = src[end]; + var lineIndent = end - (offset + indent); + + if (!this.blockIndent) { + // no explicit block indent, none yet detected + if (src[end] !== '\n') { + // first line with non-whitespace content + if (lineIndent < minBlockIndent) { + var msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; + this.error = new YAMLSemanticError(this, msg); + } + + this.blockIndent = lineIndent; + } else if (lineIndent > minBlockIndent) { + // empty line with more whitespace + minBlockIndent = lineIndent; + } + } else if (_ch2 && _ch2 !== '\n' && lineIndent < this.blockIndent) { + if (src[end] === '#') break; + + if (!this.error) { + var _src = explicit ? 'explicit indentation indicator' : 'first line'; + + var _msg = "Block scalars must not be less indented than their ".concat(_src); + + this.error = new YAMLSemanticError(this, _msg); + } + } + + if (src[end] === '\n') { + offset = end; + } else { + offset = valueEnd = Node.endOfLine(src, end); + } + } + + if (this.chomping !== Chomp.KEEP) { + offset = src[valueEnd] ? valueEnd + 1 : valueEnd; + } + + this.valueRange = new Range(start + 1, offset); + return offset; + } + /** + * Parses a block value from the source + * + * Accepted forms are: + * ``` + * BS + * block + * lines + * + * BS #comment + * block + * lines + * ``` + * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines + * are empty or have an indent level greater than `indent`. + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this block + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var src = context.src; + var offset = this.parseBlockHeader(start); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + offset = this.parseBlockValue(offset); + return offset; + } + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + offset = _get(_getPrototypeOf(BlockValue.prototype), "setOrigRanges", this).call(this, cr, offset); + return this.header ? 
this.header.setOrigRange(cr, offset) : offset; + } + }]); + + return BlockValue; +}(Node); + +var FlowCollection = /*#__PURE__*/function (_Node) { + _inherits(FlowCollection, _Node); + + var _super = _createSuper(FlowCollection); + + function FlowCollection(type, props) { + var _this; + + _classCallCheck(this, FlowCollection); + + _this = _super.call(this, type, props); + _this.items = null; + return _this; + } + + _createClass(FlowCollection, [{ + key: "prevNodeIsJsonLike", + value: function prevNodeIsJsonLike() { + var idx = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.items.length; + var node = this.items[idx - 1]; + return !!node && (node.jsonLike || node.type === Type.COMMENT && this.prevNodeIsJsonLike(idx - 1)); + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var parseNode = context.parseNode, + src = context.src; + var indent = context.indent, + lineStart = context.lineStart; + var char = src[start]; // { or [ + + this.items = [{ + char: char, + offset: start + }]; + var offset = Node.endOfWhiteSpace(src, start + 1); + char = src[offset]; + + while (char && char !== ']' && char !== '}') { + switch (char) { + case '\n': + { + lineStart = offset + 1; + var wsEnd = Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n') { + var blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src: src + }, lineStart); + this.items.push(blankLine); + } + + offset = Node.endOfIndent(src, lineStart); + + if (offset <= lineStart + indent) { + char = src[offset]; + + if (offset < lineStart + indent || char !== ']' && char !== '}') { + var msg = 'Insufficient indentation in flow collection'; + this.error = new YAMLSemanticError(this, msg); + } + } + } + break; + + case ',': + { + this.items.push({ + char: char, + offset: offset + }); + offset += 1; + } + break; + + case '#': + { + var comment = new Comment(); + offset = comment.parse({ + src: src + }, offset); + this.items.push(comment); + } + break; + + case '?': + case ':': + { + var next = src[offset + 1]; + + if (next === '\n' || next === '\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace + char === ':' && this.prevNodeIsJsonLike()) { + this.items.push({ + char: char, + offset: offset + }); + offset += 1; + break; + } + } + // fallthrough + + default: + { + var node = parseNode({ + atLineStart: false, + inCollection: false, + inFlow: true, + indent: -1, + lineStart: lineStart, + parent: this + }, offset); + + if (!node) { + // at next document start + this.valueRange = new Range(start, offset); + return offset; + } + + this.items.push(node); + offset = Node.normalizeOffset(src, node.range.end); + } + } + + offset = Node.endOfWhiteSpace(src, offset); + char = src[offset]; + } + + this.valueRange = new Range(start, offset + 1); + + if (char) { + this.items.push({ + char: char, + offset: offset + }); + offset = Node.endOfWhiteSpace(src, offset + 1); + offset = this.parseComment(offset); + } + + return offset; + } + }, { + key: "setOrigRanges", + value: function setOrigRanges(cr, offset) { + offset = _get(_getPrototypeOf(FlowCollection.prototype), "setOrigRanges", this).call(this, cr, offset); + this.items.forEach(function (node) { + if (node instanceof Node) { + offset = node.setOrigRanges(cr, offset); + } else if (cr.length === 0) { + 
node.origOffset = node.offset; + } else { + var i = offset; + + while (i < cr.length) { + if (cr[i] > node.offset) break;else ++i; + } + + node.origOffset = node.offset + i; + offset = i; + } + }); + return offset; + } + }, { + key: "toString", + value: function toString() { + var src = this.context.src, + items = this.items, + range = this.range, + value = this.value; + if (value != null) return value; + var nodes = items.filter(function (item) { + return item instanceof Node; + }); + var str = ''; + var prevEnd = range.start; + nodes.forEach(function (node) { + var prefix = src.slice(prevEnd, node.range.start); + prevEnd = node.range.end; + str += prefix + String(node); + + if (str[str.length - 1] === '\n' && src[prevEnd - 1] !== '\n' && src[prevEnd] === '\n') { + // Comment range does not include the terminal newline, but its + // stringified value does. Without this fix, newlines at comment ends + // get duplicated. + prevEnd += 1; + } + }); + str += src.slice(prevEnd, range.end); + return Node.addStringTerminator(src, range.end, str); + } + }]); + + return FlowCollection; +}(Node); + +var QuoteDouble = /*#__PURE__*/function (_Node) { + _inherits(QuoteDouble, _Node); + + var _super = _createSuper(QuoteDouble); + + function QuoteDouble() { + _classCallCheck(this, QuoteDouble); + + return _super.apply(this, arguments); + } + + _createClass(QuoteDouble, [{ + key: "strValue", + get: + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + function get() { + if (!this.valueRange || !this.context) return null; + var errors = []; + var _this$valueRange = this.valueRange, + start = _this$valueRange.start, + end = _this$valueRange.end; + var _this$context = this.context, + indent = _this$context.indent, + src = _this$context.src; + if (src[end - 1] !== '"') errors.push(new YAMLSyntaxError(this, 'Missing closing "quote')); // Using String#replace is too painful with escaped newlines preceded by + // escaped backslashes; also, this should be faster. 
+ + var str = ''; + + for (var i = start + 1; i < end - 1; ++i) { + var ch = src[i]; + + if (ch === '\n') { + if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + + var _Node$foldNewline = Node.foldNewline(src, i, indent), + fold = _Node$foldNewline.fold, + offset = _Node$foldNewline.offset, + error = _Node$foldNewline.error; + + str += fold; + i = offset; + if (error) errors.push(new YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented')); + } else if (ch === '\\') { + i += 1; + + switch (src[i]) { + case '0': + str += '\0'; + break; + // null character + + case 'a': + str += '\x07'; + break; + // bell character + + case 'b': + str += '\b'; + break; + // backspace + + case 'e': + str += '\x1b'; + break; + // escape character + + case 'f': + str += '\f'; + break; + // form feed + + case 'n': + str += '\n'; + break; + // line feed + + case 'r': + str += '\r'; + break; + // carriage return + + case 't': + str += '\t'; + break; + // horizontal tab + + case 'v': + str += '\v'; + break; + // vertical tab + + case 'N': + str += "\x85"; + break; + // Unicode next line + + case '_': + str += "\xA0"; + break; + // Unicode non-breaking space + + case 'L': + str += "\u2028"; + break; + // Unicode line separator + + case 'P': + str += "\u2029"; + break; + // Unicode paragraph separator + + case ' ': + str += ' '; + break; + + case '"': + str += '"'; + break; + + case '/': + str += '/'; + break; + + case '\\': + str += '\\'; + break; + + case '\t': + str += '\t'; + break; + + case 'x': + str += this.parseCharCode(i + 1, 2, errors); + i += 2; + break; + + case 'u': + str += this.parseCharCode(i + 1, 4, errors); + i += 4; + break; + + case 'U': + str += this.parseCharCode(i + 1, 8, errors); + i += 8; + break; + + case '\n': + // skip escaped newlines, but still trim the following line + while (src[i + 1] === ' ' || src[i + 1] === '\t') { + i += 1; + } + + break; + + default: + errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(i - 1, 2)))); + str += '\\' + src[i]; + } + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + var wsStart = i; + var next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? { + errors: errors, + str: str + } : str; + } + }, { + key: "parseCharCode", + value: function parseCharCode(offset, length, errors) { + var src = this.context.src; + var cc = src.substr(offset, length); + var ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); + var code = ok ? 
parseInt(cc, 16) : NaN; + + if (isNaN(code)) { + errors.push(new YAMLSyntaxError(this, "Invalid escape sequence ".concat(src.substr(offset - 2, length + 2)))); + return src.substr(offset - 2, length + 2); + } + + return String.fromCodePoint(code); + } + /** + * Parses a "double quoted" value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var src = context.src; + var offset = QuoteDouble.endOfQuote(src, start + 1); + this.valueRange = new Range(start, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + }], [{ + key: "endOfQuote", + value: function endOfQuote(src, offset) { + var ch = src[offset]; + + while (ch && ch !== '"') { + offset += ch === '\\' ? 2 : 1; + ch = src[offset]; + } + + return offset + 1; + } + }]); + + return QuoteDouble; +}(Node); + +var QuoteSingle = /*#__PURE__*/function (_Node) { + _inherits(QuoteSingle, _Node); + + var _super = _createSuper(QuoteSingle); + + function QuoteSingle() { + _classCallCheck(this, QuoteSingle); + + return _super.apply(this, arguments); + } + + _createClass(QuoteSingle, [{ + key: "strValue", + get: + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + function get() { + if (!this.valueRange || !this.context) return null; + var errors = []; + var _this$valueRange = this.valueRange, + start = _this$valueRange.start, + end = _this$valueRange.end; + var _this$context = this.context, + indent = _this$context.indent, + src = _this$context.src; + if (src[end - 1] !== "'") errors.push(new YAMLSyntaxError(this, "Missing closing 'quote")); + var str = ''; + + for (var i = start + 1; i < end - 1; ++i) { + var ch = src[i]; + + if (ch === '\n') { + if (Node.atDocumentBoundary(src, i + 1)) errors.push(new YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + + var _Node$foldNewline = Node.foldNewline(src, i, indent), + fold = _Node$foldNewline.fold, + offset = _Node$foldNewline.offset, + error = _Node$foldNewline.error; + + str += fold; + i = offset; + if (error) errors.push(new YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented')); + } else if (ch === "'") { + str += ch; + i += 1; + if (src[i] !== "'") errors.push(new YAMLSyntaxError(this, 'Unescaped single quote? This should not happen.')); + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + var wsStart = i; + var next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? 
{ + errors: errors, + str: str + } : str; + } + /** + * Parses a 'single quoted' value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + }, { + key: "parse", + value: function parse(context, start) { + this.context = context; + var src = context.src; + var offset = QuoteSingle.endOfQuote(src, start + 1); + this.valueRange = new Range(start, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + }], [{ + key: "endOfQuote", + value: function endOfQuote(src, offset) { + var ch = src[offset]; + + while (ch) { + if (ch === "'") { + if (src[offset + 1] !== "'") break; + ch = src[offset += 2]; + } else { + ch = src[offset += 1]; + } + } + + return offset + 1; + } + }]); + + return QuoteSingle; +}(Node); + +function createNewNode(type, props) { + switch (type) { + case Type.ALIAS: + return new Alias(type, props); + + case Type.BLOCK_FOLDED: + case Type.BLOCK_LITERAL: + return new BlockValue(type, props); + + case Type.FLOW_MAP: + case Type.FLOW_SEQ: + return new FlowCollection(type, props); + + case Type.MAP_KEY: + case Type.MAP_VALUE: + case Type.SEQ_ITEM: + return new CollectionItem(type, props); + + case Type.COMMENT: + case Type.PLAIN: + return new PlainValue(type, props); + + case Type.QUOTE_DOUBLE: + return new QuoteDouble(type, props); + + case Type.QUOTE_SINGLE: + return new QuoteSingle(type, props); + + /* istanbul ignore next */ + + default: + return null; + // should never happen + } +} +/** + * @param {boolean} atLineStart - Node starts at beginning of line + * @param {boolean} inFlow - true if currently in a flow context + * @param {boolean} inCollection - true if currently in a collection context + * @param {number} indent - Current level of indentation + * @param {number} lineStart - Start of the current line + * @param {Node} parent - The parent of the node + * @param {string} src - Source of the YAML document + */ + + +var ParseContext = /*#__PURE__*/function () { + function ParseContext() { + var _this = this; + + var orig = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + atLineStart = _ref.atLineStart, + inCollection = _ref.inCollection, + inFlow = _ref.inFlow, + indent = _ref.indent, + lineStart = _ref.lineStart, + parent = _ref.parent; + + _classCallCheck(this, ParseContext); + + _defineProperty(this, "parseNode", function (overlay, start) { + if (Node.atDocumentBoundary(_this.src, start)) return null; + var context = new ParseContext(_this, overlay); + + var _context$parseProps = context.parseProps(start), + props = _context$parseProps.props, + type = _context$parseProps.type, + valueStart = _context$parseProps.valueStart; + + var node = createNewNode(type, props); + var offset = node.parse(context, valueStart); + node.range = new Range(start, offset); + /* istanbul ignore if */ + + if (offset <= start) { + // This should never happen, but if it does, let's make sure to at least + // step one character forward to avoid a busy loop. 
+ node.error = new Error("Node#parse consumed no characters"); + node.error.parseEnd = offset; + node.error.source = node; + node.range.end = start + 1; + } + + if (context.nodeStartsCollection(node)) { + if (!node.error && !context.atLineStart && context.parent.type === Type.DOCUMENT) { + node.error = new YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)'); + } + + var collection = new Collection(node); + offset = collection.parse(new ParseContext(context), offset); + collection.range = new Range(start, offset); + return collection; + } + + return node; + }); + + this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false; + this.inCollection = inCollection != null ? inCollection : orig.inCollection || false; + this.inFlow = inFlow != null ? inFlow : orig.inFlow || false; + this.indent = indent != null ? indent : orig.indent; + this.lineStart = lineStart != null ? lineStart : orig.lineStart; + this.parent = parent != null ? parent : orig.parent || {}; + this.root = orig.root; + this.src = orig.src; + } + + _createClass(ParseContext, [{ + key: "nodeStartsCollection", + value: function nodeStartsCollection(node) { + var inCollection = this.inCollection, + inFlow = this.inFlow, + src = this.src; + if (inCollection || inFlow) return false; + if (node instanceof CollectionItem) return true; // check for implicit key + + var offset = node.range.end; + if (src[offset] === '\n' || src[offset - 1] === '\n') return false; + offset = Node.endOfWhiteSpace(src, offset); + return src[offset] === ':'; + } // Anchor and tag are before type, which determines the node implementation + // class; hence this intermediate step. + + }, { + key: "parseProps", + value: function parseProps(offset) { + var inFlow = this.inFlow, + parent = this.parent, + src = this.src; + var props = []; + var lineHasProps = false; + offset = this.atLineStart ? Node.endOfIndent(src, offset) : Node.endOfWhiteSpace(src, offset); + var ch = src[offset]; + + while (ch === Char.ANCHOR || ch === Char.COMMENT || ch === Char.TAG || ch === '\n') { + if (ch === '\n') { + var inEnd = offset; + var lineStart = void 0; + + do { + lineStart = inEnd + 1; + inEnd = Node.endOfIndent(src, lineStart); + } while (src[inEnd] === '\n'); + + var indentDiff = inEnd - (lineStart + this.indent); + var noIndicatorAsIndent = parent.type === Type.SEQ_ITEM && parent.context.atLineStart; + if (src[inEnd] !== '#' && !Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break; + this.atLineStart = true; + this.lineStart = lineStart; + lineHasProps = false; + offset = inEnd; + } else if (ch === Char.COMMENT) { + var end = Node.endOfLine(src, offset + 1); + props.push(new Range(offset, end)); + offset = end; + } else { + var _end = Node.endOfIdentifier(src, offset + 1); + + if (ch === Char.TAG && src[_end] === ',' && /^[a-zA-Z0-9-]+\.[a-zA-Z0-9-]+,\d\d\d\d(-\d\d){0,2}\/\S/.test(src.slice(offset + 1, _end + 13))) { + // Let's presume we're dealing with a YAML 1.0 domain tag here, rather + // than an empty but 'foo.bar' private-tagged node in a flow collection + // followed without whitespace by a plain string starting with a year + // or date divided by something. 
+ _end = Node.endOfIdentifier(src, _end + 5); + } + + props.push(new Range(offset, _end)); + lineHasProps = true; + offset = Node.endOfWhiteSpace(src, _end); + } + + ch = src[offset]; + } // '- &a : b' has an anchor on an empty node + + + if (lineHasProps && ch === ':' && Node.atBlank(src, offset + 1, true)) offset -= 1; + var type = ParseContext.parseType(src, offset, inFlow); + return { + props: props, + type: type, + valueStart: offset + }; + } + /** + * Parses a node from the source + * @param {ParseContext} overlay + * @param {number} start - Index of first non-whitespace character for the node + * @returns {?Node} - null if at a document boundary + */ + + }], [{ + key: "parseType", + value: function parseType(src, offset, inFlow) { + switch (src[offset]) { + case '*': + return Type.ALIAS; + + case '>': + return Type.BLOCK_FOLDED; + + case '|': + return Type.BLOCK_LITERAL; + + case '{': + return Type.FLOW_MAP; + + case '[': + return Type.FLOW_SEQ; + + case '?': + return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_KEY : Type.PLAIN; + + case ':': + return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.MAP_VALUE : Type.PLAIN; + + case '-': + return !inFlow && Node.atBlank(src, offset + 1, true) ? Type.SEQ_ITEM : Type.PLAIN; + + case '"': + return Type.QUOTE_DOUBLE; + + case "'": + return Type.QUOTE_SINGLE; + + default: + return Type.PLAIN; + } + } + }]); + + return ParseContext; +}(); + +// Published as 'yaml/parse-cst' +function parse(src) { + var cr = []; + + if (src.indexOf('\r') !== -1) { + src = src.replace(/\r\n?/g, function (match, offset) { + if (match.length > 1) cr.push(offset); + return '\n'; + }); + } + + var documents = []; + var offset = 0; + + do { + var doc = new Document(); + var context = new ParseContext({ + src: src + }); + offset = doc.parse(context, offset); + documents.push(doc); + } while (offset < src.length); + + documents.setOrigRanges = function () { + if (cr.length === 0) return false; + + for (var i = 1; i < cr.length; ++i) { + cr[i] -= i; + } + + var crOffset = 0; + + for (var _i = 0; _i < documents.length; ++_i) { + crOffset = documents[_i].setOrigRanges(cr, crOffset); + } + + cr.splice(0, cr.length); + return true; + }; + + documents.toString = function () { + return documents.join('...\n'); + }; + + return documents; +} + +export { parse }; diff --git a/node_modules/yaml/browser/dist/resolveSeq-492ab440.js b/node_modules/yaml/browser/dist/resolveSeq-492ab440.js new file mode 100644 index 0000000..50c7c19 --- /dev/null +++ b/node_modules/yaml/browser/dist/resolveSeq-492ab440.js @@ -0,0 +1,2419 @@ +import { c as _classCallCheck, j as _inherits, k as _createSuper, b as _createClass, e as _defineProperty, p as _assertThisInitialized, a as _typeof, q as _toArray, T as Type, _ as _createForOfIteratorHelper, l as _get, m as _getPrototypeOf, o as YAMLReferenceError, r as _possibleConstructorReturn, h as _slicedToArray, g as YAMLSemanticError, n as defaultTags, f as YAMLWarning, C as Char, Y as YAMLSyntaxError, P as PlainValue } from './PlainValue-b8036b75.js'; + +function addCommentBefore(str, indent, comment) { + if (!comment) return str; + var cc = comment.replace(/[\s\S]^/gm, "$&".concat(indent, "#")); + return "#".concat(cc, "\n").concat(indent).concat(str); +} +function addComment(str, indent, comment) { + return !comment ? str : comment.indexOf('\n') === -1 ? 
"".concat(str, " #").concat(comment) : "".concat(str, "\n") + comment.replace(/^/gm, "".concat(indent || '', "#")); +} + +var Node = function Node() { + _classCallCheck(this, Node); +}; + +function toJSON(value, arg, ctx) { + if (Array.isArray(value)) return value.map(function (v, i) { + return toJSON(v, String(i), ctx); + }); + + if (value && typeof value.toJSON === 'function') { + var anchor = ctx && ctx.anchors && ctx.anchors.get(value); + if (anchor) ctx.onCreate = function (res) { + anchor.res = res; + delete ctx.onCreate; + }; + var res = value.toJSON(arg, ctx); + if (anchor && ctx.onCreate) ctx.onCreate(res); + return res; + } + + if ((!ctx || !ctx.keep) && typeof value === 'bigint') return Number(value); + return value; +} + +var Scalar = /*#__PURE__*/function (_Node) { + _inherits(Scalar, _Node); + + var _super = _createSuper(Scalar); + + function Scalar(value) { + var _this; + + _classCallCheck(this, Scalar); + + _this = _super.call(this); + _this.value = value; + return _this; + } + + _createClass(Scalar, [{ + key: "toJSON", + value: function toJSON$1(arg, ctx) { + return ctx && ctx.keep ? this.value : toJSON(this.value, arg, ctx); + } + }, { + key: "toString", + value: function toString() { + return String(this.value); + } + }]); + + return Scalar; +}(Node); + +function collectionFromPath(schema, path, value) { + var v = value; + + for (var i = path.length - 1; i >= 0; --i) { + var k = path[i]; + + if (Number.isInteger(k) && k >= 0) { + var a = []; + a[k] = v; + v = a; + } else { + var o = {}; + Object.defineProperty(o, k, { + value: v, + writable: true, + enumerable: true, + configurable: true + }); + v = o; + } + } + + return schema.createNode(v, false); +} // null, undefined, or an empty non-string iterable (e.g. []) + + +var isEmptyPath = function isEmptyPath(path) { + return path == null || _typeof(path) === 'object' && path[Symbol.iterator]().next().done; +}; +var Collection = /*#__PURE__*/function (_Node) { + _inherits(Collection, _Node); + + var _super = _createSuper(Collection); + + function Collection(schema) { + var _this; + + _classCallCheck(this, Collection); + + _this = _super.call(this); + + _defineProperty(_assertThisInitialized(_this), "items", []); + + _this.schema = schema; + return _this; + } + + _createClass(Collection, [{ + key: "addIn", + value: function addIn(path, value) { + if (isEmptyPath(path)) this.add(value);else { + var _path = _toArray(path), + key = _path[0], + rest = _path.slice(1); + + var node = this.get(key, true); + if (node instanceof Collection) node.addIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest)); + } + } + }, { + key: "deleteIn", + value: function deleteIn(_ref) { + var _ref2 = _toArray(_ref), + key = _ref2[0], + rest = _ref2.slice(1); + + if (rest.length === 0) return this.delete(key); + var node = this.get(key, true); + if (node instanceof Collection) return node.deleteIn(rest);else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest)); + } + }, { + key: "getIn", + value: function getIn(_ref3, keepScalar) { + var _ref4 = _toArray(_ref3), + key = _ref4[0], + rest = _ref4.slice(1); + + var node = this.get(key, true); + if (rest.length === 0) return !keepScalar && node instanceof Scalar ? node.value : node;else return node instanceof Collection ? 
node.getIn(rest, keepScalar) : undefined; + } + }, { + key: "hasAllNullValues", + value: function hasAllNullValues() { + return this.items.every(function (node) { + if (!node || node.type !== 'PAIR') return false; + var n = node.value; + return n == null || n instanceof Scalar && n.value == null && !n.commentBefore && !n.comment && !n.tag; + }); + } + }, { + key: "hasIn", + value: function hasIn(_ref5) { + var _ref6 = _toArray(_ref5), + key = _ref6[0], + rest = _ref6.slice(1); + + if (rest.length === 0) return this.has(key); + var node = this.get(key, true); + return node instanceof Collection ? node.hasIn(rest) : false; + } + }, { + key: "setIn", + value: function setIn(_ref7, value) { + var _ref8 = _toArray(_ref7), + key = _ref8[0], + rest = _ref8.slice(1); + + if (rest.length === 0) { + this.set(key, value); + } else { + var node = this.get(key, true); + if (node instanceof Collection) node.setIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest)); + } + } // overridden in implementations + + /* istanbul ignore next */ + + }, { + key: "toJSON", + value: function toJSON() { + return null; + } + }, { + key: "toString", + value: function toString(ctx, _ref9, onComment, onChompKeep) { + var _this2 = this; + + var blockItem = _ref9.blockItem, + flowChars = _ref9.flowChars, + isMap = _ref9.isMap, + itemIndent = _ref9.itemIndent; + var _ctx = ctx, + indent = _ctx.indent, + indentStep = _ctx.indentStep, + stringify = _ctx.stringify; + var inFlow = this.type === Type.FLOW_MAP || this.type === Type.FLOW_SEQ || ctx.inFlow; + if (inFlow) itemIndent += indentStep; + var allNullValues = isMap && this.hasAllNullValues(); + ctx = Object.assign({}, ctx, { + allNullValues: allNullValues, + indent: itemIndent, + inFlow: inFlow, + type: null + }); + var chompKeep = false; + var hasItemWithNewLine = false; + var nodes = this.items.reduce(function (nodes, item, i) { + var comment; + + if (item) { + if (!chompKeep && item.spaceBefore) nodes.push({ + type: 'comment', + str: '' + }); + if (item.commentBefore) item.commentBefore.match(/^.*$/gm).forEach(function (line) { + nodes.push({ + type: 'comment', + str: "#".concat(line) + }); + }); + if (item.comment) comment = item.comment; + if (inFlow && (!chompKeep && item.spaceBefore || item.commentBefore || item.comment || item.key && (item.key.commentBefore || item.key.comment) || item.value && (item.value.commentBefore || item.value.comment))) hasItemWithNewLine = true; + } + + chompKeep = false; + var str = stringify(item, ctx, function () { + return comment = null; + }, function () { + return chompKeep = true; + }); + if (inFlow && !hasItemWithNewLine && str.includes('\n')) hasItemWithNewLine = true; + if (inFlow && i < _this2.items.length - 1) str += ','; + str = addComment(str, itemIndent, comment); + if (chompKeep && (comment || inFlow)) chompKeep = false; + nodes.push({ + type: 'item', + str: str + }); + return nodes; + }, []); + var str; + + if (nodes.length === 0) { + str = flowChars.start + flowChars.end; + } else if (inFlow) { + var start = flowChars.start, + end = flowChars.end; + var strings = nodes.map(function (n) { + return n.str; + }); + + if (hasItemWithNewLine || strings.reduce(function (sum, str) { + return sum + str.length + 2; + }, 2) > Collection.maxFlowStringSingleLineLength) { + str = start; + + var _iterator = _createForOfIteratorHelper(strings), + _step; + + try { + for 
(_iterator.s(); !(_step = _iterator.n()).done;) { + var s = _step.value; + str += s ? "\n".concat(indentStep).concat(indent).concat(s) : '\n'; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + str += "\n".concat(indent).concat(end); + } else { + str = "".concat(start, " ").concat(strings.join(' '), " ").concat(end); + } + } else { + var _strings = nodes.map(blockItem); + + str = _strings.shift(); + + var _iterator2 = _createForOfIteratorHelper(_strings), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var _s = _step2.value; + str += _s ? "\n".concat(indent).concat(_s) : '\n'; + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + } + + if (this.comment) { + str += '\n' + this.comment.replace(/^/gm, "".concat(indent, "#")); + if (onComment) onComment(); + } else if (chompKeep && onChompKeep) onChompKeep(); + + return str; + } + }]); + + return Collection; +}(Node); + +_defineProperty(Collection, "maxFlowStringSingleLineLength", 60); + +function asItemIndex(key) { + var idx = key instanceof Scalar ? key.value : key; + if (idx && typeof idx === 'string') idx = Number(idx); + return Number.isInteger(idx) && idx >= 0 ? idx : null; +} + +var YAMLSeq = /*#__PURE__*/function (_Collection) { + _inherits(YAMLSeq, _Collection); + + var _super = _createSuper(YAMLSeq); + + function YAMLSeq() { + _classCallCheck(this, YAMLSeq); + + return _super.apply(this, arguments); + } + + _createClass(YAMLSeq, [{ + key: "add", + value: function add(value) { + this.items.push(value); + } + }, { + key: "delete", + value: function _delete(key) { + var idx = asItemIndex(key); + if (typeof idx !== 'number') return false; + var del = this.items.splice(idx, 1); + return del.length > 0; + } + }, { + key: "get", + value: function get(key, keepScalar) { + var idx = asItemIndex(key); + if (typeof idx !== 'number') return undefined; + var it = this.items[idx]; + return !keepScalar && it instanceof Scalar ? it.value : it; + } + }, { + key: "has", + value: function has(key) { + var idx = asItemIndex(key); + return typeof idx === 'number' && idx < this.items.length; + } + }, { + key: "set", + value: function set(key, value) { + var idx = asItemIndex(key); + if (typeof idx !== 'number') throw new Error("Expected a valid index, not ".concat(key, ".")); + this.items[idx] = value; + } + }, { + key: "toJSON", + value: function toJSON$1(_, ctx) { + var seq = []; + if (ctx && ctx.onCreate) ctx.onCreate(seq); + var i = 0; + + var _iterator = _createForOfIteratorHelper(this.items), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var item = _step.value; + seq.push(toJSON(item, String(i++), ctx)); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return seq; + } + }, { + key: "toString", + value: function toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + return _get(_getPrototypeOf(YAMLSeq.prototype), "toString", this).call(this, ctx, { + blockItem: function blockItem(n) { + return n.type === 'comment' ? 
n.str : "- ".concat(n.str); + }, + flowChars: { + start: '[', + end: ']' + }, + isMap: false, + itemIndent: (ctx.indent || '') + ' ' + }, onComment, onChompKeep); + } + }]); + + return YAMLSeq; +}(Collection); + +var stringifyKey = function stringifyKey(key, jsKey, ctx) { + if (jsKey === null) return ''; + if (_typeof(jsKey) !== 'object') return String(jsKey); + if (key instanceof Node && ctx && ctx.doc) return key.toString({ + anchors: Object.create(null), + doc: ctx.doc, + indent: '', + indentStep: ctx.indentStep, + inFlow: true, + inStringifyKey: true, + stringify: ctx.stringify + }); + return JSON.stringify(jsKey); +}; + +var Pair = /*#__PURE__*/function (_Node) { + _inherits(Pair, _Node); + + var _super = _createSuper(Pair); + + function Pair(key) { + var _this; + + var value = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null; + + _classCallCheck(this, Pair); + + _this = _super.call(this); + _this.key = key; + _this.value = value; + _this.type = Pair.Type.PAIR; + return _this; + } + + _createClass(Pair, [{ + key: "commentBefore", + get: function get() { + return this.key instanceof Node ? this.key.commentBefore : undefined; + }, + set: function set(cb) { + if (this.key == null) this.key = new Scalar(null); + if (this.key instanceof Node) this.key.commentBefore = cb;else { + var msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.'; + throw new Error(msg); + } + } + }, { + key: "addToJSMap", + value: function addToJSMap(ctx, map) { + var key = toJSON(this.key, '', ctx); + + if (map instanceof Map) { + var value = toJSON(this.value, key, ctx); + map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else { + var stringKey = stringifyKey(this.key, key, ctx); + + var _value = toJSON(this.value, stringKey, ctx); + + if (stringKey in map) Object.defineProperty(map, stringKey, { + value: _value, + writable: true, + enumerable: true, + configurable: true + });else map[stringKey] = _value; + } + + return map; + } + }, { + key: "toJSON", + value: function toJSON(_, ctx) { + var pair = ctx && ctx.mapAsMap ? new Map() : {}; + return this.addToJSMap(ctx, pair); + } + }, { + key: "toString", + value: function toString(ctx, onComment, onChompKeep) { + if (!ctx || !ctx.doc) return JSON.stringify(this); + var _ctx$doc$options = ctx.doc.options, + indentSize = _ctx$doc$options.indent, + indentSeq = _ctx$doc$options.indentSeq, + simpleKeys = _ctx$doc$options.simpleKeys; + var key = this.key, + value = this.value; + var keyComment = key instanceof Node && key.comment; + + if (simpleKeys) { + if (keyComment) { + throw new Error('With simple keys, key nodes cannot have comments'); + } + + if (key instanceof Collection) { + var msg = 'With simple keys, collection cannot be used as a key value'; + throw new Error(msg); + } + } + + var explicitKey = !simpleKeys && (!key || keyComment || (key instanceof Node ? 
key instanceof Collection || key.type === Type.BLOCK_FOLDED || key.type === Type.BLOCK_LITERAL : _typeof(key) === 'object')); + var _ctx = ctx, + doc = _ctx.doc, + indent = _ctx.indent, + indentStep = _ctx.indentStep, + stringify = _ctx.stringify; + ctx = Object.assign({}, ctx, { + implicitKey: !explicitKey, + indent: indent + indentStep + }); + var chompKeep = false; + var str = stringify(key, ctx, function () { + return keyComment = null; + }, function () { + return chompKeep = true; + }); + str = addComment(str, ctx.indent, keyComment); + + if (!explicitKey && str.length > 1024) { + if (simpleKeys) throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); + explicitKey = true; + } + + if (ctx.allNullValues && !simpleKeys) { + if (this.comment) { + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } else if (chompKeep && !keyComment && onChompKeep) onChompKeep(); + + return ctx.inFlow && !explicitKey ? str : "? ".concat(str); + } + + str = explicitKey ? "? ".concat(str, "\n").concat(indent, ":") : "".concat(str, ":"); + + if (this.comment) { + // expected (but not strictly required) to be a single-line comment + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } + + var vcb = ''; + var valueComment = null; + + if (value instanceof Node) { + if (value.spaceBefore) vcb = '\n'; + + if (value.commentBefore) { + var cs = value.commentBefore.replace(/^/gm, "".concat(ctx.indent, "#")); + vcb += "\n".concat(cs); + } + + valueComment = value.comment; + } else if (value && _typeof(value) === 'object') { + value = doc.schema.createNode(value, true); + } + + ctx.implicitKey = false; + if (!explicitKey && !this.comment && value instanceof Scalar) ctx.indentAtStart = str.length + 1; + chompKeep = false; + + if (!indentSeq && indentSize >= 2 && !ctx.inFlow && !explicitKey && value instanceof YAMLSeq && value.type !== Type.FLOW_SEQ && !value.tag && !doc.anchors.getName(value)) { + // If indentSeq === false, consider '- ' as part of indentation where possible + ctx.indent = ctx.indent.substr(2); + } + + var valueStr = stringify(value, ctx, function () { + return valueComment = null; + }, function () { + return chompKeep = true; + }); + var ws = ' '; + + if (vcb || this.comment) { + ws = "".concat(vcb, "\n").concat(ctx.indent); + } else if (!explicitKey && value instanceof Collection) { + var flow = valueStr[0] === '[' || valueStr[0] === '{'; + if (!flow || valueStr.includes('\n')) ws = "\n".concat(ctx.indent); + } else if (valueStr[0] === '\n') ws = ''; + + if (chompKeep && !valueComment && onChompKeep) onChompKeep(); + return addComment(str + ws + valueStr, ctx.indent, valueComment); + } + }]); + + return Pair; +}(Node); + +_defineProperty(Pair, "Type", { + PAIR: 'PAIR', + MERGE_PAIR: 'MERGE_PAIR' +}); + +var getAliasCount = function getAliasCount(node, anchors) { + if (node instanceof Alias) { + var anchor = anchors.get(node.source); + return anchor.count * anchor.aliasCount; + } else if (node instanceof Collection) { + var count = 0; + + var _iterator = _createForOfIteratorHelper(node.items), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var item = _step.value; + var c = getAliasCount(item, anchors); + if (c > count) count = c; + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return count; + } else if (node instanceof Pair) { + var kc = getAliasCount(node.key, anchors); + var vc = getAliasCount(node.value, anchors); + return Math.max(kc, 
vc); + } + + return 1; +}; + +var Alias = /*#__PURE__*/function (_Node) { + _inherits(Alias, _Node); + + var _super = _createSuper(Alias); + + function Alias(source) { + var _this; + + _classCallCheck(this, Alias); + + _this = _super.call(this); + _this.source = source; + _this.type = Type.ALIAS; + return _this; + } + + _createClass(Alias, [{ + key: "tag", + set: function set(t) { + throw new Error('Alias nodes cannot have tags'); + } + }, { + key: "toJSON", + value: function toJSON$1(arg, ctx) { + if (!ctx) return toJSON(this.source, arg, ctx); + var anchors = ctx.anchors, + maxAliasCount = ctx.maxAliasCount; + var anchor = anchors.get(this.source); + /* istanbul ignore if */ + + if (!anchor || anchor.res === undefined) { + var msg = 'This should not happen: Alias anchor was not resolved?'; + if (this.cstNode) throw new YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg); + } + + if (maxAliasCount >= 0) { + anchor.count += 1; + if (anchor.aliasCount === 0) anchor.aliasCount = getAliasCount(this.source, anchors); + + if (anchor.count * anchor.aliasCount > maxAliasCount) { + var _msg = 'Excessive alias count indicates a resource exhaustion attack'; + if (this.cstNode) throw new YAMLReferenceError(this.cstNode, _msg);else throw new ReferenceError(_msg); + } + } + + return anchor.res; + } // Only called when stringifying an alias mapping key while constructing + // Object output. + + }, { + key: "toString", + value: function toString(ctx) { + return Alias.stringify(this, ctx); + } + }], [{ + key: "stringify", + value: function stringify(_ref, _ref2) { + var range = _ref.range, + source = _ref.source; + var anchors = _ref2.anchors, + doc = _ref2.doc, + implicitKey = _ref2.implicitKey, + inStringifyKey = _ref2.inStringifyKey; + var anchor = Object.keys(anchors).find(function (a) { + return anchors[a] === source; + }); + if (!anchor && inStringifyKey) anchor = doc.anchors.getName(source) || doc.anchors.newName(); + if (anchor) return "*".concat(anchor).concat(implicitKey ? ' ' : ''); + var msg = doc.anchors.getName(source) ? 'Alias node must be after source node' : 'Source node not found for alias node'; + throw new Error("".concat(msg, " [").concat(range, "]")); + } + }]); + + return Alias; +}(Node); + +_defineProperty(Alias, "default", true); + +function findPair(items, key) { + var k = key instanceof Scalar ? 
key.value : key; + + var _iterator = _createForOfIteratorHelper(items), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var it = _step.value; + + if (it instanceof Pair) { + if (it.key === key || it.key === k) return it; + if (it.key && it.key.value === k) return it; + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return undefined; +} +var YAMLMap = /*#__PURE__*/function (_Collection) { + _inherits(YAMLMap, _Collection); + + var _super = _createSuper(YAMLMap); + + function YAMLMap() { + _classCallCheck(this, YAMLMap); + + return _super.apply(this, arguments); + } + + _createClass(YAMLMap, [{ + key: "add", + value: function add(pair, overwrite) { + if (!pair) pair = new Pair(pair);else if (!(pair instanceof Pair)) pair = new Pair(pair.key || pair, pair.value); + var prev = findPair(this.items, pair.key); + var sortEntries = this.schema && this.schema.sortMapEntries; + + if (prev) { + if (overwrite) prev.value = pair.value;else throw new Error("Key ".concat(pair.key, " already set")); + } else if (sortEntries) { + var i = this.items.findIndex(function (item) { + return sortEntries(pair, item) < 0; + }); + if (i === -1) this.items.push(pair);else this.items.splice(i, 0, pair); + } else { + this.items.push(pair); + } + } + }, { + key: "delete", + value: function _delete(key) { + var it = findPair(this.items, key); + if (!it) return false; + var del = this.items.splice(this.items.indexOf(it), 1); + return del.length > 0; + } + }, { + key: "get", + value: function get(key, keepScalar) { + var it = findPair(this.items, key); + var node = it && it.value; + return !keepScalar && node instanceof Scalar ? node.value : node; + } + }, { + key: "has", + value: function has(key) { + return !!findPair(this.items, key); + } + }, { + key: "set", + value: function set(key, value) { + this.add(new Pair(key, value), true); + } + /** + * @param {*} arg ignored + * @param {*} ctx Conversion context, originally set in Document#toJSON() + * @param {Class} Type If set, forces the returned collection type + * @returns {*} Instance of Type, Map, or Object + */ + + }, { + key: "toJSON", + value: function toJSON(_, ctx, Type) { + var map = Type ? new Type() : ctx && ctx.mapAsMap ? 
new Map() : {}; + if (ctx && ctx.onCreate) ctx.onCreate(map); + + var _iterator2 = _createForOfIteratorHelper(this.items), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var item = _step2.value; + item.addToJSMap(ctx, map); + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + + return map; + } + }, { + key: "toString", + value: function toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + + var _iterator3 = _createForOfIteratorHelper(this.items), + _step3; + + try { + for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) { + var item = _step3.value; + if (!(item instanceof Pair)) throw new Error("Map items must all be pairs; found ".concat(JSON.stringify(item), " instead")); + } + } catch (err) { + _iterator3.e(err); + } finally { + _iterator3.f(); + } + + return _get(_getPrototypeOf(YAMLMap.prototype), "toString", this).call(this, ctx, { + blockItem: function blockItem(n) { + return n.str; + }, + flowChars: { + start: '{', + end: '}' + }, + isMap: true, + itemIndent: ctx.indent || '' + }, onComment, onChompKeep); + } + }]); + + return YAMLMap; +}(Collection); + +var MERGE_KEY = '<<'; +var Merge = /*#__PURE__*/function (_Pair) { + _inherits(Merge, _Pair); + + var _super = _createSuper(Merge); + + function Merge(pair) { + var _this; + + _classCallCheck(this, Merge); + + if (pair instanceof Pair) { + var seq = pair.value; + + if (!(seq instanceof YAMLSeq)) { + seq = new YAMLSeq(); + seq.items.push(pair.value); + seq.range = pair.value.range; + } + + _this = _super.call(this, pair.key, seq); + _this.range = pair.range; + } else { + _this = _super.call(this, new Scalar(MERGE_KEY), new YAMLSeq()); + } + + _this.type = Pair.Type.MERGE_PAIR; + return _possibleConstructorReturn(_this); + } // If the value associated with a merge key is a single mapping node, each of + // its key/value pairs is inserted into the current mapping, unless the key + // already exists in it. If the value associated with the merge key is a + // sequence, then this sequence is expected to contain mapping nodes and each + // of these nodes is merged in turn according to its order in the sequence. + // Keys in mapping nodes earlier in the sequence override keys specified in + // later mapping nodes. 
-- http://yaml.org/type/merge.html + + + _createClass(Merge, [{ + key: "addToJSMap", + value: function addToJSMap(ctx, map) { + var _iterator = _createForOfIteratorHelper(this.value.items), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var source = _step.value.source; + if (!(source instanceof YAMLMap)) throw new Error('Merge sources must be maps'); + var srcMap = source.toJSON(null, ctx, Map); + + var _iterator2 = _createForOfIteratorHelper(srcMap), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var _step2$value = _slicedToArray(_step2.value, 2), + key = _step2$value[0], + value = _step2$value[1]; + + if (map instanceof Map) { + if (!map.has(key)) map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else if (!Object.prototype.hasOwnProperty.call(map, key)) { + Object.defineProperty(map, key, { + value: value, + writable: true, + enumerable: true, + configurable: true + }); + } + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return map; + } + }, { + key: "toString", + value: function toString(ctx, onComment) { + var seq = this.value; + if (seq.items.length > 1) return _get(_getPrototypeOf(Merge.prototype), "toString", this).call(this, ctx, onComment); + this.value = seq.items[0]; + + var str = _get(_getPrototypeOf(Merge.prototype), "toString", this).call(this, ctx, onComment); + + this.value = seq; + return str; + } + }]); + + return Merge; +}(Pair); + +var binaryOptions = { + defaultType: Type.BLOCK_LITERAL, + lineWidth: 76 +}; +var boolOptions = { + trueStr: 'true', + falseStr: 'false' +}; +var intOptions = { + asBigInt: false +}; +var nullOptions = { + nullStr: 'null' +}; +var strOptions = { + defaultType: Type.PLAIN, + doubleQuoted: { + jsonEncoding: false, + minMultiLineLength: 40 + }, + fold: { + lineWidth: 80, + minContentWidth: 20 + } +}; + +function resolveScalar(str, tags, scalarFallback) { + var _iterator = _createForOfIteratorHelper(tags), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _step.value, + format = _step$value.format, + test = _step$value.test, + resolve = _step$value.resolve; + + if (test) { + var match = str.match(test); + + if (match) { + var res = resolve.apply(null, match); + if (!(res instanceof Scalar)) res = new Scalar(res); + if (format) res.format = format; + return res; + } + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + if (scalarFallback) str = scalarFallback(str); + return new Scalar(str); +} + +var FOLD_FLOW = 'flow'; +var FOLD_BLOCK = 'block'; +var FOLD_QUOTED = 'quoted'; // presumes i+1 is at the start of a line +// returns index of last newline in more-indented block + +var consumeMoreIndentedLines = function consumeMoreIndentedLines(text, i) { + var ch = text[i + 1]; + + while (ch === ' ' || ch === '\t') { + do { + ch = text[i += 1]; + } while (ch && ch !== '\n'); + + ch = text[i + 1]; + } + + return i; +}; +/** + * Tries to keep input at up to `lineWidth` characters, splitting only on spaces + * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are + * terminated with `\n` and started with `indent`. 
+ * + * @param {string} text + * @param {string} indent + * @param {string} [mode='flow'] `'block'` prevents more-indented lines + * from being folded; `'quoted'` allows for `\` escapes, including escaped + * newlines + * @param {Object} options + * @param {number} [options.indentAtStart] Accounts for leading contents on + * the first line, defaulting to `indent.length` + * @param {number} [options.lineWidth=80] + * @param {number} [options.minContentWidth=20] Allow highly indented lines to + * stretch the line width or indent content from the start + * @param {function} options.onFold Called once if the text is folded + * @param {function} options.onFold Called once if any line of text exceeds + * lineWidth characters + */ + + +function foldFlowLines(text, indent, mode, _ref) { + var indentAtStart = _ref.indentAtStart, + _ref$lineWidth = _ref.lineWidth, + lineWidth = _ref$lineWidth === void 0 ? 80 : _ref$lineWidth, + _ref$minContentWidth = _ref.minContentWidth, + minContentWidth = _ref$minContentWidth === void 0 ? 20 : _ref$minContentWidth, + onFold = _ref.onFold, + onOverflow = _ref.onOverflow; + if (!lineWidth || lineWidth < 0) return text; + var endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); + if (text.length <= endStep) return text; + var folds = []; + var escapedFolds = {}; + var end = lineWidth - indent.length; + + if (typeof indentAtStart === 'number') { + if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) folds.push(0);else end = lineWidth - indentAtStart; + } + + var split = undefined; + var prev = undefined; + var overflow = false; + var i = -1; + var escStart = -1; + var escEnd = -1; + + if (mode === FOLD_BLOCK) { + i = consumeMoreIndentedLines(text, i); + if (i !== -1) end = i + endStep; + } + + for (var ch; ch = text[i += 1];) { + if (mode === FOLD_QUOTED && ch === '\\') { + escStart = i; + + switch (text[i + 1]) { + case 'x': + i += 3; + break; + + case 'u': + i += 5; + break; + + case 'U': + i += 9; + break; + + default: + i += 1; + } + + escEnd = i; + } + + if (ch === '\n') { + if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i); + end = i + endStep; + split = undefined; + } else { + if (ch === ' ' && prev && prev !== ' ' && prev !== '\n' && prev !== '\t') { + // space surrounded by non-space can be replaced with newline + indent + var next = text[i + 1]; + if (next && next !== ' ' && next !== '\n' && next !== '\t') split = i; + } + + if (i >= end) { + if (split) { + folds.push(split); + end = split + endStep; + split = undefined; + } else if (mode === FOLD_QUOTED) { + // white-space collected at end may stretch past lineWidth + while (prev === ' ' || prev === '\t') { + prev = ch; + ch = text[i += 1]; + overflow = true; + } // Account for newline escape, but don't break preceding escape + + + var j = i > escEnd + 1 ? 
i - 2 : escStart - 1; // Bail out if lineWidth & minContentWidth are shorter than an escape string + + if (escapedFolds[j]) return text; + folds.push(j); + escapedFolds[j] = true; + end = j + endStep; + split = undefined; + } else { + overflow = true; + } + } + } + + prev = ch; + } + + if (overflow && onOverflow) onOverflow(); + if (folds.length === 0) return text; + if (onFold) onFold(); + var res = text.slice(0, folds[0]); + + for (var _i = 0; _i < folds.length; ++_i) { + var fold = folds[_i]; + + var _end = folds[_i + 1] || text.length; + + if (fold === 0) res = "\n".concat(indent).concat(text.slice(0, _end));else { + if (mode === FOLD_QUOTED && escapedFolds[fold]) res += "".concat(text[fold], "\\"); + res += "\n".concat(indent).concat(text.slice(fold + 1, _end)); + } + } + + return res; +} + +var getFoldOptions = function getFoldOptions(_ref) { + var indentAtStart = _ref.indentAtStart; + return indentAtStart ? Object.assign({ + indentAtStart: indentAtStart + }, strOptions.fold) : strOptions.fold; +}; // Also checks for lines starting with %, as parsing the output as YAML 1.1 will +// presume that's starting a new document. + + +var containsDocumentMarker = function containsDocumentMarker(str) { + return /^(%|---|\.\.\.)/m.test(str); +}; + +function lineLengthOverLimit(str, lineWidth, indentLength) { + if (!lineWidth || lineWidth < 0) return false; + var limit = lineWidth - indentLength; + var strLen = str.length; + if (strLen <= limit) return false; + + for (var i = 0, start = 0; i < strLen; ++i) { + if (str[i] === '\n') { + if (i - start > limit) return true; + start = i + 1; + if (strLen - start <= limit) return false; + } + } + + return true; +} + +function doubleQuotedString(value, ctx) { + var implicitKey = ctx.implicitKey; + var _strOptions$doubleQuo = strOptions.doubleQuoted, + jsonEncoding = _strOptions$doubleQuo.jsonEncoding, + minMultiLineLength = _strOptions$doubleQuo.minMultiLineLength; + var json = JSON.stringify(value); + if (jsonEncoding) return json; + var indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + var str = ''; + var start = 0; + + for (var i = 0, ch = json[i]; ch; ch = json[++i]) { + if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { + // space before newline needs to be escaped to not be folded + str += json.slice(start, i) + '\\ '; + i += 1; + start = i; + ch = '\\'; + } + + if (ch === '\\') switch (json[i + 1]) { + case 'u': + { + str += json.slice(start, i); + var code = json.substr(i + 2, 4); + + switch (code) { + case '0000': + str += '\\0'; + break; + + case '0007': + str += '\\a'; + break; + + case '000b': + str += '\\v'; + break; + + case '001b': + str += '\\e'; + break; + + case '0085': + str += '\\N'; + break; + + case '00a0': + str += '\\_'; + break; + + case '2028': + str += '\\L'; + break; + + case '2029': + str += '\\P'; + break; + + default: + if (code.substr(0, 2) === '00') str += '\\x' + code.substr(2);else str += json.substr(i, 6); + } + + i += 5; + start = i + 1; + } + break; + + case 'n': + if (implicitKey || json[i + 2] === '"' || json.length < minMultiLineLength) { + i += 1; + } else { + // folding will eat first newline + str += json.slice(start, i) + '\n\n'; + + while (json[i + 2] === '\\' && json[i + 3] === 'n' && json[i + 4] !== '"') { + str += '\n'; + i += 2; + } + + str += indent; // space after newline needs to be escaped to not be folded + + if (json[i + 2] === ' ') str += '\\'; + i += 1; + start = i + 1; + } + + break; + + default: + i += 1; + } + } + + str = start ? 
str + json.slice(start) : json; + return implicitKey ? str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx)); +} + +function singleQuotedString(value, ctx) { + if (ctx.implicitKey) { + if (/\n/.test(value)) return doubleQuotedString(value, ctx); + } else { + // single quoted string can't have leading or trailing whitespace around newline + if (/[ \t]\n|\n[ \t]/.test(value)) return doubleQuotedString(value, ctx); + } + + var indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + var res = "'" + value.replace(/'/g, "''").replace(/\n+/g, "$&\n".concat(indent)) + "'"; + return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx)); +} + +function blockString(_ref2, ctx, onComment, onChompKeep) { + var comment = _ref2.comment, + type = _ref2.type, + value = _ref2.value; + + // 1. Block can't end in whitespace unless the last line is non-empty. + // 2. Strings consisting of only whitespace are best rendered explicitly. + if (/\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { + return doubleQuotedString(value, ctx); + } + + var indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); + var indentSize = indent ? '2' : '1'; // root is at -1 + + var literal = type === Type.BLOCK_FOLDED ? false : type === Type.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, strOptions.fold.lineWidth, indent.length); + var header = literal ? '|' : '>'; + if (!value) return header + '\n'; + var wsStart = ''; + var wsEnd = ''; + value = value.replace(/[\n\t ]*$/, function (ws) { + var n = ws.indexOf('\n'); + + if (n === -1) { + header += '-'; // strip + } else if (value === ws || n !== ws.length - 1) { + header += '+'; // keep + + if (onChompKeep) onChompKeep(); + } + + wsEnd = ws.replace(/\n$/, ''); + return ''; + }).replace(/^[\n ]*/, function (ws) { + if (ws.indexOf(' ') !== -1) header += indentSize; + var m = ws.match(/ +$/); + + if (m) { + wsStart = ws.slice(0, -m[0].length); + return m[0]; + } else { + wsStart = ws; + return ''; + } + }); + if (wsEnd) wsEnd = wsEnd.replace(/\n+(?!\n|$)/g, "$&".concat(indent)); + if (wsStart) wsStart = wsStart.replace(/\n+/g, "$&".concat(indent)); + + if (comment) { + header += ' #' + comment.replace(/ ?[\r\n]+/g, ' '); + if (onComment) onComment(); + } + + if (!value) return "".concat(header).concat(indentSize, "\n").concat(indent).concat(wsEnd); + + if (literal) { + value = value.replace(/\n+/g, "$&".concat(indent)); + return "".concat(header, "\n").concat(indent).concat(wsStart).concat(value).concat(wsEnd); + } + + value = value.replace(/\n+/g, '\n$&').replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded + // ^ ind.line ^ empty ^ capture next empty lines only at end of indent + .replace(/\n+/g, "$&".concat(indent)); + var body = foldFlowLines("".concat(wsStart).concat(value).concat(wsEnd), indent, FOLD_BLOCK, strOptions.fold); + return "".concat(header, "\n").concat(indent).concat(body); +} + +function plainString(item, ctx, onComment, onChompKeep) { + var comment = item.comment, + type = item.type, + value = item.value; + var actualString = ctx.actualString, + implicitKey = ctx.implicitKey, + indent = ctx.indent, + inFlow = ctx.inFlow; + + if (implicitKey && /[\n[\]{},]/.test(value) || inFlow && /[[\]{},]/.test(value)) { + return doubleQuotedString(value, ctx); + } + + if (!value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { + // not allowed: + // - empty string, '-' or '?' 
+ // - start with an indicator character (except [?:-]) or /[?-] / + // - '\n ', ': ' or ' \n' anywhere + // - '#' not preceded by a non-space char + // - end with ' ' or ':' + return implicitKey || inFlow || value.indexOf('\n') === -1 ? value.indexOf('"') !== -1 && value.indexOf("'") === -1 ? singleQuotedString(value, ctx) : doubleQuotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep); + } + + if (!implicitKey && !inFlow && type !== Type.PLAIN && value.indexOf('\n') !== -1) { + // Where allowed & type not set explicitly, prefer block style for multiline strings + return blockString(item, ctx, onComment, onChompKeep); + } + + if (indent === '' && containsDocumentMarker(value)) { + ctx.forceBlockIndent = true; + return blockString(item, ctx, onComment, onChompKeep); + } + + var str = value.replace(/\n+/g, "$&\n".concat(indent)); // Verify that output will be parsed as a string, as e.g. plain numbers and + // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), + // and others in v1.1. + + if (actualString) { + var tags = ctx.doc.schema.tags; + var resolved = resolveScalar(str, tags, tags.scalarFallback).value; + if (typeof resolved !== 'string') return doubleQuotedString(value, ctx); + } + + var body = implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx)); + + if (comment && !inFlow && (body.indexOf('\n') !== -1 || comment.indexOf('\n') !== -1)) { + if (onComment) onComment(); + return addCommentBefore(body, indent, comment); + } + + return body; +} + +function stringifyString(item, ctx, onComment, onChompKeep) { + var defaultType = strOptions.defaultType; + var implicitKey = ctx.implicitKey, + inFlow = ctx.inFlow; + var _item = item, + type = _item.type, + value = _item.value; + + if (typeof value !== 'string') { + value = String(value); + item = Object.assign({}, item, { + value: value + }); + } + + var _stringify = function _stringify(_type) { + switch (_type) { + case Type.BLOCK_FOLDED: + case Type.BLOCK_LITERAL: + return blockString(item, ctx, onComment, onChompKeep); + + case Type.QUOTE_DOUBLE: + return doubleQuotedString(value, ctx); + + case Type.QUOTE_SINGLE: + return singleQuotedString(value, ctx); + + case Type.PLAIN: + return plainString(item, ctx, onComment, onChompKeep); + + default: + return null; + } + }; + + if (type !== Type.QUOTE_DOUBLE && /[\x00-\x08\x0b-\x1f\x7f-\x9f]/.test(value)) { + // force double quotes on control characters + type = Type.QUOTE_DOUBLE; + } else if ((implicitKey || inFlow) && (type === Type.BLOCK_FOLDED || type === Type.BLOCK_LITERAL)) { + // should not happen; blocks are not valid inside flow containers + type = Type.QUOTE_DOUBLE; + } + + var res = _stringify(type); + + if (res === null) { + res = _stringify(defaultType); + if (res === null) throw new Error("Unsupported default string type ".concat(defaultType)); + } + + return res; +} + +function stringifyNumber(_ref) { + var format = _ref.format, + minFractionDigits = _ref.minFractionDigits, + tag = _ref.tag, + value = _ref.value; + if (typeof value === 'bigint') return String(value); + if (!isFinite(value)) return isNaN(value) ? '.nan' : value < 0 ? 
'-.inf' : '.inf'; + var n = JSON.stringify(value); + + if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\d/.test(n)) { + var i = n.indexOf('.'); + + if (i < 0) { + i = n.length; + n += '.'; + } + + var d = minFractionDigits - (n.length - i - 1); + + while (d-- > 0) { + n += '0'; + } + } + + return n; +} + +function checkFlowCollectionEnd(errors, cst) { + var char, name; + + switch (cst.type) { + case Type.FLOW_MAP: + char = '}'; + name = 'flow map'; + break; + + case Type.FLOW_SEQ: + char = ']'; + name = 'flow sequence'; + break; + + default: + errors.push(new YAMLSemanticError(cst, 'Not a flow collection!?')); + return; + } + + var lastItem; + + for (var i = cst.items.length - 1; i >= 0; --i) { + var item = cst.items[i]; + + if (!item || item.type !== Type.COMMENT) { + lastItem = item; + break; + } + } + + if (lastItem && lastItem.char !== char) { + var msg = "Expected ".concat(name, " to end with ").concat(char); + var err; + + if (typeof lastItem.offset === 'number') { + err = new YAMLSemanticError(cst, msg); + err.offset = lastItem.offset + 1; + } else { + err = new YAMLSemanticError(lastItem, msg); + if (lastItem.range && lastItem.range.end) err.offset = lastItem.range.end - lastItem.range.start; + } + + errors.push(err); + } +} +function checkFlowCommentSpace(errors, comment) { + var prev = comment.context.src[comment.range.start - 1]; + + if (prev !== '\n' && prev !== '\t' && prev !== ' ') { + var msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new YAMLSemanticError(comment, msg)); + } +} +function getLongKeyError(source, key) { + var sk = String(key); + var k = sk.substr(0, 8) + '...' + sk.substr(-8); + return new YAMLSemanticError(source, "The \"".concat(k, "\" key is too long")); +} +function resolveComments(collection, comments) { + var _iterator = _createForOfIteratorHelper(comments), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _step.value, + afterKey = _step$value.afterKey, + before = _step$value.before, + comment = _step$value.comment; + var item = collection.items[before]; + + if (!item) { + if (comment !== undefined) { + if (collection.comment) collection.comment += '\n' + comment;else collection.comment = comment; + } + } else { + if (afterKey && item.value) item = item.value; + + if (comment === undefined) { + if (afterKey || !item.commentBefore) item.spaceBefore = true; + } else { + if (item.commentBefore) item.commentBefore += '\n' + comment;else item.commentBefore = comment; + } + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } +} + +// on error, will return { str: string, errors: Error[] } +function resolveString(doc, node) { + var res = node.strValue; + if (!res) return ''; + if (typeof res === 'string') return res; + res.errors.forEach(function (error) { + if (!error.source) error.source = node; + doc.errors.push(error); + }); + return res.str; +} + +function resolveTagHandle(doc, node) { + var _node$tag = node.tag, + handle = _node$tag.handle, + suffix = _node$tag.suffix; + var prefix = doc.tagPrefixes.find(function (p) { + return p.handle === handle; + }); + + if (!prefix) { + var dtp = doc.getDefaults().tagPrefixes; + if (dtp) prefix = dtp.find(function (p) { + return p.handle === handle; + }); + if (!prefix) throw new YAMLSemanticError(node, "The ".concat(handle, " tag handle is non-default and was not declared.")); + } + + if (!suffix) throw new YAMLSemanticError(node, "The ".concat(handle, " tag has 
no suffix.")); + + if (handle === '!' && (doc.version || doc.options.version) === '1.0') { + if (suffix[0] === '^') { + doc.warnings.push(new YAMLWarning(node, 'YAML 1.0 ^ tag expansion is not supported')); + return suffix; + } + + if (/[:/]/.test(suffix)) { + // word/foo -> tag:word.yaml.org,2002:foo + var vocab = suffix.match(/^([a-z0-9-]+)\/(.*)/i); + return vocab ? "tag:".concat(vocab[1], ".yaml.org,2002:").concat(vocab[2]) : "tag:".concat(suffix); + } + } + + return prefix.prefix + decodeURIComponent(suffix); +} + +function resolveTagName(doc, node) { + var tag = node.tag, + type = node.type; + var nonSpecific = false; + + if (tag) { + var handle = tag.handle, + suffix = tag.suffix, + verbatim = tag.verbatim; + + if (verbatim) { + if (verbatim !== '!' && verbatim !== '!!') return verbatim; + var msg = "Verbatim tags aren't resolved, so ".concat(verbatim, " is invalid."); + doc.errors.push(new YAMLSemanticError(node, msg)); + } else if (handle === '!' && !suffix) { + nonSpecific = true; + } else { + try { + return resolveTagHandle(doc, node); + } catch (error) { + doc.errors.push(error); + } + } + } + + switch (type) { + case Type.BLOCK_FOLDED: + case Type.BLOCK_LITERAL: + case Type.QUOTE_DOUBLE: + case Type.QUOTE_SINGLE: + return defaultTags.STR; + + case Type.FLOW_MAP: + case Type.MAP: + return defaultTags.MAP; + + case Type.FLOW_SEQ: + case Type.SEQ: + return defaultTags.SEQ; + + case Type.PLAIN: + return nonSpecific ? defaultTags.STR : null; + + default: + return null; + } +} + +function resolveByTagName(doc, node, tagName) { + var tags = doc.schema.tags; + var matchWithTest = []; + + var _iterator = _createForOfIteratorHelper(tags), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var tag = _step.value; + + if (tag.tag === tagName) { + if (tag.test) matchWithTest.push(tag);else { + var res = tag.resolve(doc, node); + return res instanceof Collection ? 
res : new Scalar(res); + } + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + var str = resolveString(doc, node); + if (typeof str === 'string' && matchWithTest.length > 0) return resolveScalar(str, matchWithTest, tags.scalarFallback); + return null; +} + +function getFallbackTagName(_ref) { + var type = _ref.type; + + switch (type) { + case Type.FLOW_MAP: + case Type.MAP: + return defaultTags.MAP; + + case Type.FLOW_SEQ: + case Type.SEQ: + return defaultTags.SEQ; + + default: + return defaultTags.STR; + } +} + +function resolveTag(doc, node, tagName) { + try { + var res = resolveByTagName(doc, node, tagName); + + if (res) { + if (tagName && node.tag) res.tag = tagName; + return res; + } + } catch (error) { + /* istanbul ignore if */ + if (!error.source) error.source = node; + doc.errors.push(error); + return null; + } + + try { + var fallback = getFallbackTagName(node); + if (!fallback) throw new Error("The tag ".concat(tagName, " is unavailable")); + var msg = "The tag ".concat(tagName, " is unavailable, falling back to ").concat(fallback); + doc.warnings.push(new YAMLWarning(node, msg)); + + var _res = resolveByTagName(doc, node, fallback); + + _res.tag = tagName; + return _res; + } catch (error) { + var refError = new YAMLReferenceError(node, error.message); + refError.stack = error.stack; + doc.errors.push(refError); + return null; + } +} + +var isCollectionItem = function isCollectionItem(node) { + if (!node) return false; + var type = node.type; + return type === Type.MAP_KEY || type === Type.MAP_VALUE || type === Type.SEQ_ITEM; +}; + +function resolveNodeProps(errors, node) { + var comments = { + before: [], + after: [] + }; + var hasAnchor = false; + var hasTag = false; + var props = isCollectionItem(node.context.parent) ? node.context.parent.props.concat(node.props) : node.props; + + var _iterator = _createForOfIteratorHelper(props), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var _step$value = _step.value, + start = _step$value.start, + end = _step$value.end; + + switch (node.context.src[start]) { + case Char.COMMENT: + { + if (!node.commentHasRequiredWhitespace(start)) { + var msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new YAMLSemanticError(node, msg)); + } + + var header = node.header, + valueRange = node.valueRange; + var cc = valueRange && (start > valueRange.start || header && start > header.start) ? 
comments.after : comments.before; + cc.push(node.context.src.slice(start + 1, end)); + break; + } + // Actual anchor & tag resolution is handled by schema, here we just complain + + case Char.ANCHOR: + if (hasAnchor) { + var _msg = 'A node can have at most one anchor'; + errors.push(new YAMLSemanticError(node, _msg)); + } + + hasAnchor = true; + break; + + case Char.TAG: + if (hasTag) { + var _msg2 = 'A node can have at most one tag'; + errors.push(new YAMLSemanticError(node, _msg2)); + } + + hasTag = true; + break; + } + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return { + comments: comments, + hasAnchor: hasAnchor, + hasTag: hasTag + }; +} + +function resolveNodeValue(doc, node) { + var anchors = doc.anchors, + errors = doc.errors, + schema = doc.schema; + + if (node.type === Type.ALIAS) { + var name = node.rawValue; + var src = anchors.getNode(name); + + if (!src) { + var msg = "Aliased anchor not found: ".concat(name); + errors.push(new YAMLReferenceError(node, msg)); + return null; + } // Lazy resolution for circular references + + + var res = new Alias(src); + + anchors._cstAliases.push(res); + + return res; + } + + var tagName = resolveTagName(doc, node); + if (tagName) return resolveTag(doc, node, tagName); + + if (node.type !== Type.PLAIN) { + var _msg3 = "Failed to resolve ".concat(node.type, " node here"); + + errors.push(new YAMLSyntaxError(node, _msg3)); + return null; + } + + try { + var str = resolveString(doc, node); + return resolveScalar(str, schema.tags, schema.tags.scalarFallback); + } catch (error) { + if (!error.source) error.source = node; + errors.push(error); + return null; + } +} // sets node.resolved on success + + +function resolveNode(doc, node) { + if (!node) return null; + if (node.error) doc.errors.push(node.error); + + var _resolveNodeProps = resolveNodeProps(doc.errors, node), + comments = _resolveNodeProps.comments, + hasAnchor = _resolveNodeProps.hasAnchor, + hasTag = _resolveNodeProps.hasTag; + + if (hasAnchor) { + var anchors = doc.anchors; + var name = node.anchor; + var prev = anchors.getNode(name); // At this point, aliases for any preceding node with the same anchor + // name have already been resolved, so it may safely be renamed. + + if (prev) anchors.map[anchors.newName(name)] = prev; // During parsing, we need to store the CST node in anchors.map as + // anchors need to be available during resolution to allow for + // circular references. + + anchors.map[name] = node; + } + + if (node.type === Type.ALIAS && (hasAnchor || hasTag)) { + var msg = 'An alias node must not specify any properties'; + doc.errors.push(new YAMLSemanticError(node, msg)); + } + + var res = resolveNodeValue(doc, node); + + if (res) { + res.range = [node.range.start, node.range.end]; + if (doc.options.keepCstNodes) res.cstNode = node; + if (doc.options.keepNodeTypes) res.type = node.type; + var cb = comments.before.join('\n'); + + if (cb) { + res.commentBefore = res.commentBefore ? "".concat(res.commentBefore, "\n").concat(cb) : cb; + } + + var ca = comments.after.join('\n'); + if (ca) res.comment = res.comment ? "".concat(res.comment, "\n").concat(ca) : ca; + } + + return node.resolved = res; +} + +function resolveMap(doc, cst) { + if (cst.type !== Type.MAP && cst.type !== Type.FLOW_MAP) { + var msg = "A ".concat(cst.type, " node cannot be resolved as a mapping"); + doc.errors.push(new YAMLSyntaxError(cst, msg)); + return null; + } + + var _ref = cst.type === Type.FLOW_MAP ? 
resolveFlowMapItems(doc, cst) : resolveBlockMapItems(doc, cst), + comments = _ref.comments, + items = _ref.items; + + var map = new YAMLMap(); + map.items = items; + resolveComments(map, comments); + var hasCollectionKey = false; + + for (var i = 0; i < items.length; ++i) { + var iKey = items[i].key; + if (iKey instanceof Collection) hasCollectionKey = true; + + if (doc.schema.merge && iKey && iKey.value === MERGE_KEY) { + items[i] = new Merge(items[i]); + var sources = items[i].value.items; + var error = null; + sources.some(function (node) { + if (node instanceof Alias) { + // During parsing, alias sources are CST nodes; to account for + // circular references their resolved values can't be used here. + var type = node.source.type; + if (type === Type.MAP || type === Type.FLOW_MAP) return false; + return error = 'Merge nodes aliases can only point to maps'; + } + + return error = 'Merge nodes can only have Alias nodes as values'; + }); + if (error) doc.errors.push(new YAMLSemanticError(cst, error)); + } else { + for (var j = i + 1; j < items.length; ++j) { + var jKey = items[j].key; + + if (iKey === jKey || iKey && jKey && Object.prototype.hasOwnProperty.call(iKey, 'value') && iKey.value === jKey.value) { + var _msg = "Map keys must be unique; \"".concat(iKey, "\" is repeated"); + + doc.errors.push(new YAMLSemanticError(cst, _msg)); + break; + } + } + } + } + + if (hasCollectionKey && !doc.options.mapAsMap) { + var warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.'; + doc.warnings.push(new YAMLWarning(cst, warn)); + } + + cst.resolved = map; + return map; +} + +var valueHasPairComment = function valueHasPairComment(_ref2) { + var _ref2$context = _ref2.context, + lineStart = _ref2$context.lineStart, + node = _ref2$context.node, + src = _ref2$context.src, + props = _ref2.props; + if (props.length === 0) return false; + var start = props[0].start; + if (node && start > node.valueRange.start) return false; + if (src[start] !== Char.COMMENT) return false; + + for (var i = lineStart; i < start; ++i) { + if (src[i] === '\n') return false; + } + + return true; +}; + +function resolvePairComment(item, pair) { + if (!valueHasPairComment(item)) return; + var comment = item.getPropValue(0, Char.COMMENT, true); + var found = false; + var cb = pair.value.commentBefore; + + if (cb && cb.startsWith(comment)) { + pair.value.commentBefore = cb.substr(comment.length + 1); + found = true; + } else { + var cc = pair.value.comment; + + if (!item.node && cc && cc.startsWith(comment)) { + pair.value.comment = cc.substr(comment.length + 1); + found = true; + } + } + + if (found) pair.comment = comment; +} + +function resolveBlockMapItems(doc, cst) { + var comments = []; + var items = []; + var key = undefined; + var keyStart = null; + + for (var i = 0; i < cst.items.length; ++i) { + var item = cst.items[i]; + + switch (item.type) { + case Type.BLANK_LINE: + comments.push({ + afterKey: !!key, + before: items.length + }); + break; + + case Type.COMMENT: + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + break; + + case Type.MAP_KEY: + if (key !== undefined) items.push(new Pair(key)); + if (item.error) doc.errors.push(item.error); + key = resolveNode(doc, item.node); + keyStart = null; + break; + + case Type.MAP_VALUE: + { + if (key === undefined) key = null; + if (item.error) doc.errors.push(item.error); + + if (!item.context.atLineStart && item.node && item.node.type === Type.MAP && 
!item.node.context.atLineStart) { + var msg = 'Nested mappings are not allowed in compact mappings'; + doc.errors.push(new YAMLSemanticError(item.node, msg)); + } + + var valueNode = item.node; + + if (!valueNode && item.props.length > 0) { + // Comments on an empty mapping value need to be preserved, so we + // need to construct a minimal empty node here to use instead of the + // missing `item.node`. -- eemeli/yaml#19 + valueNode = new PlainValue(Type.PLAIN, []); + valueNode.context = { + parent: item, + src: item.context.src + }; + var pos = item.range.start + 1; + valueNode.range = { + start: pos, + end: pos + }; + valueNode.valueRange = { + start: pos, + end: pos + }; + + if (typeof item.range.origStart === 'number') { + var origPos = item.range.origStart + 1; + valueNode.range.origStart = valueNode.range.origEnd = origPos; + valueNode.valueRange.origStart = valueNode.valueRange.origEnd = origPos; + } + } + + var pair = new Pair(key, resolveNode(doc, valueNode)); + resolvePairComment(item, pair); + items.push(pair); + + if (key && typeof keyStart === 'number') { + if (item.range.start > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + } + + key = undefined; + keyStart = null; + } + break; + + default: + if (key !== undefined) items.push(new Pair(key)); + key = resolveNode(doc, item); + keyStart = item.range.start; + if (item.error) doc.errors.push(item.error); + + next: for (var j = i + 1;; ++j) { + var nextItem = cst.items[j]; + + switch (nextItem && nextItem.type) { + case Type.BLANK_LINE: + case Type.COMMENT: + continue next; + + case Type.MAP_VALUE: + break next; + + default: + { + var _msg2 = 'Implicit map keys need to be followed by map values'; + doc.errors.push(new YAMLSemanticError(item, _msg2)); + break next; + } + } + } + + if (item.valueRangeContainsNewline) { + var _msg3 = 'Implicit map keys need to be on a single line'; + doc.errors.push(new YAMLSemanticError(item, _msg3)); + } + + } + } + + if (key !== undefined) items.push(new Pair(key)); + return { + comments: comments, + items: items + }; +} + +function resolveFlowMapItems(doc, cst) { + var comments = []; + var items = []; + var key = undefined; + var explicitKey = false; + var next = '{'; + + for (var i = 0; i < cst.items.length; ++i) { + var item = cst.items[i]; + + if (typeof item.char === 'string') { + var char = item.char, + offset = item.offset; + + if (char === '?' 
&& key === undefined && !explicitKey) { + explicitKey = true; + next = ':'; + continue; + } + + if (char === ':') { + if (key === undefined) key = null; + + if (next === ':') { + next = ','; + continue; + } + } else { + if (explicitKey) { + if (key === undefined && char !== ',') key = null; + explicitKey = false; + } + + if (key !== undefined) { + items.push(new Pair(key)); + key = undefined; + + if (char === ',') { + next = ':'; + continue; + } + } + } + + if (char === '}') { + if (i === cst.items.length - 1) continue; + } else if (char === next) { + next = ':'; + continue; + } + + var msg = "Flow map contains an unexpected ".concat(char); + var err = new YAMLSyntaxError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } else if (item.type === Type.BLANK_LINE) { + comments.push({ + afterKey: !!key, + before: items.length + }); + } else if (item.type === Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + } else if (key === undefined) { + if (next === ',') doc.errors.push(new YAMLSemanticError(item, 'Separator , missing in flow map')); + key = resolveNode(doc, item); + } else { + if (next !== ',') doc.errors.push(new YAMLSemanticError(item, 'Indicator : missing in flow map entry')); + items.push(new Pair(key, resolveNode(doc, item))); + key = undefined; + explicitKey = false; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments: comments, + items: items + }; +} + +function resolveSeq(doc, cst) { + if (cst.type !== Type.SEQ && cst.type !== Type.FLOW_SEQ) { + var msg = "A ".concat(cst.type, " node cannot be resolved as a sequence"); + doc.errors.push(new YAMLSyntaxError(cst, msg)); + return null; + } + + var _ref = cst.type === Type.FLOW_SEQ ? resolveFlowSeqItems(doc, cst) : resolveBlockSeqItems(doc, cst), + comments = _ref.comments, + items = _ref.items; + + var seq = new YAMLSeq(); + seq.items = items; + resolveComments(seq, comments); + + if (!doc.options.mapAsMap && items.some(function (it) { + return it instanceof Pair && it.key instanceof Collection; + })) { + var warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. 
Use mapAsMap: true to avoid this.'; + doc.warnings.push(new YAMLWarning(cst, warn)); + } + + cst.resolved = seq; + return seq; +} + +function resolveBlockSeqItems(doc, cst) { + var comments = []; + var items = []; + + for (var i = 0; i < cst.items.length; ++i) { + var item = cst.items[i]; + + switch (item.type) { + case Type.BLANK_LINE: + comments.push({ + before: items.length + }); + break; + + case Type.COMMENT: + comments.push({ + comment: item.comment, + before: items.length + }); + break; + + case Type.SEQ_ITEM: + if (item.error) doc.errors.push(item.error); + items.push(resolveNode(doc, item.node)); + + if (item.hasProps) { + var msg = 'Sequence items cannot have tags or anchors before the - indicator'; + doc.errors.push(new YAMLSemanticError(item, msg)); + } + + break; + + default: + if (item.error) doc.errors.push(item.error); + doc.errors.push(new YAMLSyntaxError(item, "Unexpected ".concat(item.type, " node in sequence"))); + } + } + + return { + comments: comments, + items: items + }; +} + +function resolveFlowSeqItems(doc, cst) { + var comments = []; + var items = []; + var explicitKey = false; + var key = undefined; + var keyStart = null; + var next = '['; + var prevItem = null; + + for (var i = 0; i < cst.items.length; ++i) { + var item = cst.items[i]; + + if (typeof item.char === 'string') { + var char = item.char, + offset = item.offset; + + if (char !== ':' && (explicitKey || key !== undefined)) { + if (explicitKey && key === undefined) key = next ? items.pop() : null; + items.push(new Pair(key)); + explicitKey = false; + key = undefined; + keyStart = null; + } + + if (char === next) { + next = null; + } else if (!next && char === '?') { + explicitKey = true; + } else if (next !== '[' && char === ':' && key === undefined) { + if (next === ',') { + key = items.pop(); + + if (key instanceof Pair) { + var msg = 'Chaining flow sequence pairs is invalid'; + var err = new YAMLSemanticError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } + + if (!explicitKey && typeof keyStart === 'number') { + var keyEnd = item.range ? 
item.range.start : item.offset; + if (keyEnd > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + var src = prevItem.context.src; + + for (var _i = keyStart; _i < keyEnd; ++_i) { + if (src[_i] === '\n') { + var _msg = 'Implicit keys of flow sequence pairs need to be on a single line'; + doc.errors.push(new YAMLSemanticError(prevItem, _msg)); + break; + } + } + } + } else { + key = null; + } + + keyStart = null; + explicitKey = false; + next = null; + } else if (next === '[' || char !== ']' || i < cst.items.length - 1) { + var _msg2 = "Flow sequence contains an unexpected ".concat(char); + + var _err = new YAMLSyntaxError(cst, _msg2); + + _err.offset = offset; + doc.errors.push(_err); + } + } else if (item.type === Type.BLANK_LINE) { + comments.push({ + before: items.length + }); + } else if (item.type === Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + comment: item.comment, + before: items.length + }); + } else { + if (next) { + var _msg3 = "Expected a ".concat(next, " in flow sequence"); + + doc.errors.push(new YAMLSemanticError(item, _msg3)); + } + + var value = resolveNode(doc, item); + + if (key === undefined) { + items.push(value); + prevItem = item; + } else { + items.push(new Pair(key, value)); + key = undefined; + } + + keyStart = item.range.start; + next = ','; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments: comments, + items: items + }; +} + +export { Alias as A, Collection as C, Merge as M, Node as N, Pair as P, Scalar as S, YAMLSeq as Y, boolOptions as a, binaryOptions as b, stringifyString as c, YAMLMap as d, isEmptyPath as e, addComment as f, resolveMap as g, resolveSeq as h, intOptions as i, resolveString as j, stringifyNumber as k, findPair as l, nullOptions as n, resolveNode as r, strOptions as s, toJSON as t }; diff --git a/node_modules/yaml/browser/dist/types.js b/node_modules/yaml/browser/dist/types.js new file mode 100644 index 0000000..da2288b --- /dev/null +++ b/node_modules/yaml/browser/dist/types.js @@ -0,0 +1,4 @@ +export { A as Alias, C as Collection, M as Merge, N as Node, P as Pair, S as Scalar, d as YAMLMap, Y as YAMLSeq, b as binaryOptions, a as boolOptions, i as intOptions, n as nullOptions, s as strOptions } from './resolveSeq-492ab440.js'; +export { S as Schema } from './Schema-e94716c8.js'; +import './PlainValue-b8036b75.js'; +import './warnings-df54cb69.js'; diff --git a/node_modules/yaml/browser/dist/util.js b/node_modules/yaml/browser/dist/util.js new file mode 100644 index 0000000..29f4663 --- /dev/null +++ b/node_modules/yaml/browser/dist/util.js @@ -0,0 +1,2 @@ +export { l as findPair, g as parseMap, h as parseSeq, k as stringifyNumber, c as stringifyString, t as toJSON } from './resolveSeq-492ab440.js'; +export { T as Type, i as YAMLError, o as YAMLReferenceError, g as YAMLSemanticError, Y as YAMLSyntaxError, f as YAMLWarning } from './PlainValue-b8036b75.js'; diff --git a/node_modules/yaml/browser/dist/warnings-df54cb69.js b/node_modules/yaml/browser/dist/warnings-df54cb69.js new file mode 100644 index 0000000..9532e83 --- /dev/null +++ b/node_modules/yaml/browser/dist/warnings-df54cb69.js @@ -0,0 +1,499 @@ +import { o as YAMLReferenceError, T as Type, g as YAMLSemanticError, _ as _createForOfIteratorHelper, e as _defineProperty, j as _inherits, k as _createSuper, c as _classCallCheck, p as _assertThisInitialized, b as _createClass, a as _typeof, l as _get, m as _getPrototypeOf } from './PlainValue-b8036b75.js'; +import { j 
as resolveString, b as binaryOptions, c as stringifyString, h as resolveSeq, P as Pair, d as YAMLMap, Y as YAMLSeq, t as toJSON, S as Scalar, l as findPair, g as resolveMap, k as stringifyNumber } from './resolveSeq-492ab440.js'; + +/* global atob, btoa, Buffer */ +var binary = { + identify: function identify(value) { + return value instanceof Uint8Array; + }, + // Buffer inherits from Uint8Array + default: false, + tag: 'tag:yaml.org,2002:binary', + + /** + * Returns a Buffer in node and an Uint8Array in browsers + * + * To use the resulting buffer as an image, you'll want to do something like: + * + * const blob = new Blob([buffer], { type: 'image/jpeg' }) + * document.querySelector('#photo').src = URL.createObjectURL(blob) + */ + resolve: function resolve(doc, node) { + var src = resolveString(doc, node); + + if (typeof Buffer === 'function') { + return Buffer.from(src, 'base64'); + } else if (typeof atob === 'function') { + // On IE 11, atob() can't handle newlines + var str = atob(src.replace(/[\n\r]/g, '')); + var buffer = new Uint8Array(str.length); + + for (var i = 0; i < str.length; ++i) { + buffer[i] = str.charCodeAt(i); + } + + return buffer; + } else { + var msg = 'This environment does not support reading binary tags; either Buffer or atob is required'; + doc.errors.push(new YAMLReferenceError(node, msg)); + return null; + } + }, + options: binaryOptions, + stringify: function stringify(_ref, ctx, onComment, onChompKeep) { + var comment = _ref.comment, + type = _ref.type, + value = _ref.value; + var src; + + if (typeof Buffer === 'function') { + src = value instanceof Buffer ? value.toString('base64') : Buffer.from(value.buffer).toString('base64'); + } else if (typeof btoa === 'function') { + var s = ''; + + for (var i = 0; i < value.length; ++i) { + s += String.fromCharCode(value[i]); + } + + src = btoa(s); + } else { + throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); + } + + if (!type) type = binaryOptions.defaultType; + + if (type === Type.QUOTE_DOUBLE) { + value = src; + } else { + var lineWidth = binaryOptions.lineWidth; + var n = Math.ceil(src.length / lineWidth); + var lines = new Array(n); + + for (var _i = 0, o = 0; _i < n; ++_i, o += lineWidth) { + lines[_i] = src.substr(o, lineWidth); + } + + value = lines.join(type === Type.BLOCK_LITERAL ? '\n' : ' '); + } + + return stringifyString({ + comment: comment, + type: type, + value: value + }, ctx, onComment, onChompKeep); + } +}; + +function parsePairs(doc, cst) { + var seq = resolveSeq(doc, cst); + + for (var i = 0; i < seq.items.length; ++i) { + var item = seq.items[i]; + if (item instanceof Pair) continue;else if (item instanceof YAMLMap) { + if (item.items.length > 1) { + var msg = 'Each pair must have its own sequence indicator'; + throw new YAMLSemanticError(cst, msg); + } + + var pair = item.items[0] || new Pair(); + if (item.commentBefore) pair.commentBefore = pair.commentBefore ? "".concat(item.commentBefore, "\n").concat(pair.commentBefore) : item.commentBefore; + if (item.comment) pair.comment = pair.comment ? "".concat(item.comment, "\n").concat(pair.comment) : item.comment; + item = pair; + } + seq.items[i] = item instanceof Pair ? 
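+  /* Sketch of the YAML 1.1 collection tags wired up in this block (tag names
+     and error strings are taken from the surrounding code):
+
+       !!pairs [ a: 1, b: 2, a: 3 ]   # ordered, duplicate keys allowed
+       !!omap  [ a: 1, b: 2 ]         # ordered, duplicates rejected by parseOMap
+       !!set   { a, b, c }            # keys only; values must all be null
+
+     Each sequence entry is expected to be a single-entry map; parsePairs()
+     turns them into Pair nodes and raises "Each pair must have its own
+     sequence indicator" for multi-entry maps. */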
item : new Pair(item); + } + + return seq; +} +function createPairs(schema, iterable, ctx) { + var pairs = new YAMLSeq(schema); + pairs.tag = 'tag:yaml.org,2002:pairs'; + + var _iterator = _createForOfIteratorHelper(iterable), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var it = _step.value; + var key = void 0, + value = void 0; + + if (Array.isArray(it)) { + if (it.length === 2) { + key = it[0]; + value = it[1]; + } else throw new TypeError("Expected [key, value] tuple: ".concat(it)); + } else if (it && it instanceof Object) { + var keys = Object.keys(it); + + if (keys.length === 1) { + key = keys[0]; + value = it[key]; + } else throw new TypeError("Expected { key: value } tuple: ".concat(it)); + } else { + key = it; + } + + var pair = schema.createPair(key, value, ctx); + pairs.items.push(pair); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return pairs; +} +var pairs = { + default: false, + tag: 'tag:yaml.org,2002:pairs', + resolve: parsePairs, + createNode: createPairs +}; + +var YAMLOMap = /*#__PURE__*/function (_YAMLSeq) { + _inherits(YAMLOMap, _YAMLSeq); + + var _super = _createSuper(YAMLOMap); + + function YAMLOMap() { + var _this; + + _classCallCheck(this, YAMLOMap); + + _this = _super.call(this); + + _defineProperty(_assertThisInitialized(_this), "add", YAMLMap.prototype.add.bind(_assertThisInitialized(_this))); + + _defineProperty(_assertThisInitialized(_this), "delete", YAMLMap.prototype.delete.bind(_assertThisInitialized(_this))); + + _defineProperty(_assertThisInitialized(_this), "get", YAMLMap.prototype.get.bind(_assertThisInitialized(_this))); + + _defineProperty(_assertThisInitialized(_this), "has", YAMLMap.prototype.has.bind(_assertThisInitialized(_this))); + + _defineProperty(_assertThisInitialized(_this), "set", YAMLMap.prototype.set.bind(_assertThisInitialized(_this))); + + _this.tag = YAMLOMap.tag; + return _this; + } + + _createClass(YAMLOMap, [{ + key: "toJSON", + value: function toJSON$1(_, ctx) { + var map = new Map(); + if (ctx && ctx.onCreate) ctx.onCreate(map); + + var _iterator = _createForOfIteratorHelper(this.items), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var pair = _step.value; + var key = void 0, + value = void 0; + + if (pair instanceof Pair) { + key = toJSON(pair.key, '', ctx); + value = toJSON(pair.value, key, ctx); + } else { + key = toJSON(pair, '', ctx); + } + + if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys'); + map.set(key, value); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return map; + } + }]); + + return YAMLOMap; +}(YAMLSeq); + +_defineProperty(YAMLOMap, "tag", 'tag:yaml.org,2002:omap'); + +function parseOMap(doc, cst) { + var pairs = parsePairs(doc, cst); + var seenKeys = []; + + var _iterator2 = _createForOfIteratorHelper(pairs.items), + _step2; + + try { + for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) { + var key = _step2.value.key; + + if (key instanceof Scalar) { + if (seenKeys.includes(key.value)) { + var msg = 'Ordered maps must not include duplicate keys'; + throw new YAMLSemanticError(cst, msg); + } else { + seenKeys.push(key.value); + } + } + } + } catch (err) { + _iterator2.e(err); + } finally { + _iterator2.f(); + } + + return Object.assign(new YAMLOMap(), pairs); +} + +function createOMap(schema, iterable, ctx) { + var pairs = createPairs(schema, iterable, ctx); + var omap = new YAMLOMap(); + omap.items = pairs.items; + return omap; +} + +var omap = 
{ + identify: function identify(value) { + return value instanceof Map; + }, + nodeClass: YAMLOMap, + default: false, + tag: 'tag:yaml.org,2002:omap', + resolve: parseOMap, + createNode: createOMap +}; + +var YAMLSet = /*#__PURE__*/function (_YAMLMap) { + _inherits(YAMLSet, _YAMLMap); + + var _super = _createSuper(YAMLSet); + + function YAMLSet() { + var _this; + + _classCallCheck(this, YAMLSet); + + _this = _super.call(this); + _this.tag = YAMLSet.tag; + return _this; + } + + _createClass(YAMLSet, [{ + key: "add", + value: function add(key) { + var pair = key instanceof Pair ? key : new Pair(key); + var prev = findPair(this.items, pair.key); + if (!prev) this.items.push(pair); + } + }, { + key: "get", + value: function get(key, keepPair) { + var pair = findPair(this.items, key); + return !keepPair && pair instanceof Pair ? pair.key instanceof Scalar ? pair.key.value : pair.key : pair; + } + }, { + key: "set", + value: function set(key, value) { + if (typeof value !== 'boolean') throw new Error("Expected boolean value for set(key, value) in a YAML set, not ".concat(_typeof(value))); + var prev = findPair(this.items, key); + + if (prev && !value) { + this.items.splice(this.items.indexOf(prev), 1); + } else if (!prev && value) { + this.items.push(new Pair(key)); + } + } + }, { + key: "toJSON", + value: function toJSON(_, ctx) { + return _get(_getPrototypeOf(YAMLSet.prototype), "toJSON", this).call(this, _, ctx, Set); + } + }, { + key: "toString", + value: function toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + if (this.hasAllNullValues()) return _get(_getPrototypeOf(YAMLSet.prototype), "toString", this).call(this, ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values'); + } + }]); + + return YAMLSet; +}(YAMLMap); + +_defineProperty(YAMLSet, "tag", 'tag:yaml.org,2002:set'); + +function parseSet(doc, cst) { + var map = resolveMap(doc, cst); + if (!map.hasAllNullValues()) throw new YAMLSemanticError(cst, 'Set items must all have null values'); + return Object.assign(new YAMLSet(), map); +} + +function createSet(schema, iterable, ctx) { + var set = new YAMLSet(); + + var _iterator = _createForOfIteratorHelper(iterable), + _step; + + try { + for (_iterator.s(); !(_step = _iterator.n()).done;) { + var value = _step.value; + set.items.push(schema.createPair(value, null, ctx)); + } + } catch (err) { + _iterator.e(err); + } finally { + _iterator.f(); + } + + return set; +} + +var set = { + identify: function identify(value) { + return value instanceof Set; + }, + nodeClass: YAMLSet, + default: false, + tag: 'tag:yaml.org,2002:set', + resolve: parseSet, + createNode: createSet +}; + +var parseSexagesimal = function parseSexagesimal(sign, parts) { + var n = parts.split(':').reduce(function (n, p) { + return n * 60 + Number(p); + }, 0); + return sign === '-' ? -n : n; +}; // hhhh:mm:ss.sss + + +var stringifySexagesimal = function stringifySexagesimal(_ref) { + var value = _ref.value; + if (isNaN(value) || !isFinite(value)) return stringifyNumber(value); + var sign = ''; + + if (value < 0) { + sign = '-'; + value = Math.abs(value); + } + + var parts = [value % 60]; // seconds, including ms + + if (value < 60) { + parts.unshift(0); // at least one : is required + } else { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value % 60); // minutes + + if (value >= 60) { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value); // hours + } + } + + return sign + parts.map(function (n) { + return n < 10 ? 
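+  /* Worked example for the sexagesimal helpers here: parseSexagesimal('', '1:30:00')
+     folds "h:m:s" into seconds, 1*3600 + 30*60 + 0 = 5400, and this stringifier
+     walks back the other way, zero-padding each part, so 5400 serialises as
+     "01:30:00". Underscores such as "1_0:30" are stripped beforehand by the
+     intTime / floatTime resolve() wrappers below. */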
'0' + String(n) : String(n); + }).join(':').replace(/000000\d*$/, '') // % 60 may introduce error + ; +}; + +var intTime = { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/, + resolve: function resolve(str, sign, parts) { + return parseSexagesimal(sign, parts.replace(/_/g, '')); + }, + stringify: stringifySexagesimal +}; +var floatTime = { + identify: function identify(value) { + return typeof value === 'number'; + }, + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*)$/, + resolve: function resolve(str, sign, parts) { + return parseSexagesimal(sign, parts.replace(/_/g, '')); + }, + stringify: stringifySexagesimal +}; +var timestamp = { + identify: function identify(value) { + return value instanceof Date; + }, + default: true, + tag: 'tag:yaml.org,2002:timestamp', + // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part + // may be omitted altogether, resulting in a date format. In such a case, the time part is + // assumed to be 00:00:00Z (start of day, UTC). + test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd + '(?:(?:t|T|[ \\t]+)' + // t | T | whitespace + '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? + '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 + ')?' + ')$'), + resolve: function resolve(str, year, month, day, hour, minute, second, millisec, tz) { + if (millisec) millisec = (millisec + '00').substr(1, 3); + var date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0); + + if (tz && tz !== 'Z') { + var d = parseSexagesimal(tz[0], tz.slice(1)); + if (Math.abs(d) < 30) d *= 60; + date -= 60000 * d; + } + + return new Date(date); + }, + stringify: function stringify(_ref2) { + var value = _ref2.value; + return value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, ''); + } +}; + +/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */ +function shouldWarn(deprecation) { + var env = typeof process !== 'undefined' && process.env || {}; + + if (deprecation) { + if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS; + return !env.YAML_SILENCE_DEPRECATION_WARNINGS; + } + + if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS; + return !env.YAML_SILENCE_WARNINGS; +} + +function warn(warning, type) { + if (shouldWarn(false)) { + var emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to + // https://github.com/facebook/jest/issues/2549 + + if (emit) emit(warning, type);else { + // eslint-disable-next-line no-console + console.warn(type ? "".concat(type, ": ").concat(warning) : warning); + } + } +} +function warnFileDeprecation(filename) { + if (shouldWarn(true)) { + var path = filename.replace(/.*yaml[/\\]/i, '').replace(/\.js$/, '').replace(/\\/g, '/'); + warn("The endpoint 'yaml/".concat(path, "' will be removed in a future release."), 'DeprecationWarning'); + } +} +var warned = {}; +function warnOptionDeprecation(name, alternative) { + if (!warned[name] && shouldWarn(true)) { + warned[name] = true; + var msg = "The option '".concat(name, "' will be removed in a future release"); + msg += alternative ? 
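+  /* Usage note, based on shouldWarn() above: these deprecation messages can be
+     silenced via a global flag or the environment, e.g.
+
+       YAML_SILENCE_DEPRECATION_WARNINGS=1 node app.js   # app.js: any consumer script
+       YAML_SILENCE_WARNINGS=1 node app.js               # silences plain warn() calls
+
+     The values are only checked for truthiness, so any non-empty string
+     (even "0") disables the corresponding warnings. */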
", use '".concat(alternative, "' instead.") : '.'; + warn(msg, 'DeprecationWarning'); + } +} + +export { warnOptionDeprecation as a, binary as b, warnFileDeprecation as c, floatTime as f, intTime as i, omap as o, pairs as p, set as s, timestamp as t, warn as w }; diff --git a/node_modules/yaml/browser/index.js b/node_modules/yaml/browser/index.js new file mode 100644 index 0000000..b501ac4 --- /dev/null +++ b/node_modules/yaml/browser/index.js @@ -0,0 +1 @@ +module.exports = require('./dist').YAML diff --git a/node_modules/yaml/browser/map.js b/node_modules/yaml/browser/map.js new file mode 100644 index 0000000..78f2ebc --- /dev/null +++ b/node_modules/yaml/browser/map.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').YAMLMap +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/pair.js b/node_modules/yaml/browser/pair.js new file mode 100644 index 0000000..b2880a2 --- /dev/null +++ b/node_modules/yaml/browser/pair.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').Pair +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/parse-cst.js b/node_modules/yaml/browser/parse-cst.js new file mode 100644 index 0000000..8065756 --- /dev/null +++ b/node_modules/yaml/browser/parse-cst.js @@ -0,0 +1 @@ +module.exports = require('./dist/parse-cst').parse diff --git a/node_modules/yaml/browser/scalar.js b/node_modules/yaml/browser/scalar.js new file mode 100644 index 0000000..deee1b0 --- /dev/null +++ b/node_modules/yaml/browser/scalar.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').Scalar +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/schema.js b/node_modules/yaml/browser/schema.js new file mode 100644 index 0000000..4139c4b --- /dev/null +++ b/node_modules/yaml/browser/schema.js @@ -0,0 +1,9 @@ +const types = require('./dist/types') +const util = require('./dist/util') + +module.exports = types.Schema +module.exports.nullOptions = types.nullOptions +module.exports.strOptions = types.strOptions +module.exports.stringify = util.stringifyString + +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/seq.js b/node_modules/yaml/browser/seq.js new file mode 100644 index 0000000..66147df --- /dev/null +++ b/node_modules/yaml/browser/seq.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').YAMLSeq +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/types.js b/node_modules/yaml/browser/types.js new file mode 100644 index 0000000..b1e46c4 --- /dev/null +++ b/node_modules/yaml/browser/types.js @@ -0,0 +1 @@ +module.exports = require('./dist/types') diff --git a/node_modules/yaml/browser/types/binary.js b/node_modules/yaml/browser/types/binary.js new file mode 100644 index 0000000..271b9de --- /dev/null +++ b/node_modules/yaml/browser/types/binary.js @@ -0,0 +1,8 @@ +'use strict' +Object.defineProperty(exports, '__esModule', { value: true }) + +const legacy = require('../dist/legacy-exports') +exports.binary = legacy.binary +exports.default = [exports.binary] + +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/types/omap.js b/node_modules/yaml/browser/types/omap.js new file mode 100644 index 0000000..37b638b --- /dev/null +++ b/node_modules/yaml/browser/types/omap.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.omap 
+legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/types/pairs.js b/node_modules/yaml/browser/types/pairs.js new file mode 100644 index 0000000..f1df201 --- /dev/null +++ b/node_modules/yaml/browser/types/pairs.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.pairs +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/types/set.js b/node_modules/yaml/browser/types/set.js new file mode 100644 index 0000000..e7dc9d4 --- /dev/null +++ b/node_modules/yaml/browser/types/set.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.set +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/types/timestamp.js b/node_modules/yaml/browser/types/timestamp.js new file mode 100644 index 0000000..39c5b6d --- /dev/null +++ b/node_modules/yaml/browser/types/timestamp.js @@ -0,0 +1,10 @@ +'use strict' +Object.defineProperty(exports, '__esModule', { value: true }) + +const legacy = require('../dist/legacy-exports') +exports.default = [legacy.intTime, legacy.floatTime, legacy.timestamp] +exports.floatTime = legacy.floatTime +exports.intTime = legacy.intTime +exports.timestamp = legacy.timestamp + +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/browser/util.js b/node_modules/yaml/browser/util.js new file mode 100644 index 0000000..bf38456 --- /dev/null +++ b/node_modules/yaml/browser/util.js @@ -0,0 +1 @@ +module.exports = require('./dist/util') diff --git a/node_modules/yaml/dist/Document-9b4560a1.js b/node_modules/yaml/dist/Document-9b4560a1.js new file mode 100644 index 0000000..a2613db --- /dev/null +++ b/node_modules/yaml/dist/Document-9b4560a1.js @@ -0,0 +1,757 @@ +'use strict'; + +var PlainValue = require('./PlainValue-ec8e588e.js'); +var resolveSeq = require('./resolveSeq-d03cb037.js'); +var Schema = require('./Schema-88e323a7.js'); + +const defaultOptions = { + anchorPrefix: 'a', + customTags: null, + indent: 2, + indentSeq: true, + keepCstNodes: false, + keepNodeTypes: true, + keepBlobsInJSON: true, + mapAsMap: false, + maxAliasCount: 100, + prettyErrors: false, + // TODO Set true in v2 + simpleKeys: false, + version: '1.2' +}; +const scalarOptions = { + get binary() { + return resolveSeq.binaryOptions; + }, + + set binary(opt) { + Object.assign(resolveSeq.binaryOptions, opt); + }, + + get bool() { + return resolveSeq.boolOptions; + }, + + set bool(opt) { + Object.assign(resolveSeq.boolOptions, opt); + }, + + get int() { + return resolveSeq.intOptions; + }, + + set int(opt) { + Object.assign(resolveSeq.intOptions, opt); + }, + + get null() { + return resolveSeq.nullOptions; + }, + + set null(opt) { + Object.assign(resolveSeq.nullOptions, opt); + }, + + get str() { + return resolveSeq.strOptions; + }, + + set str(opt) { + Object.assign(resolveSeq.strOptions, opt); + } + +}; +const documentOptions = { + '1.0': { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: PlainValue.defaultTagPrefix + }, { + handle: '!!', + prefix: 'tag:private.yaml.org,2002:' + }] + }, + 1.1: { + schema: 'yaml-1.1', + merge: true, + tagPrefixes: [{ + handle: '!', + prefix: '!' + }, { + handle: '!!', + prefix: PlainValue.defaultTagPrefix + }] + }, + 1.2: { + schema: 'core', + merge: false, + tagPrefixes: [{ + handle: '!', + prefix: '!' 
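+  /* Sketch of what these per-version defaults mean for shorthand tags, given
+     PlainValue.defaultTagPrefix === 'tag:yaml.org,2002:':
+
+       YAML 1.1 / 1.2:  !!str foo  ->  tag:yaml.org,2002:str
+                        !local foo ->  !local   (the '!' handle stays local)
+       YAML 1.0:        !!str foo  ->  tag:private.yaml.org,2002:str
+
+     Documents may add their own handles with %TAG directives, handled by
+     resolveTagDirective() further down; merge: true additionally enables '<<'
+     merge keys for the 1.0 and 1.1 schemas. */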
+ }, { + handle: '!!', + prefix: PlainValue.defaultTagPrefix + }] + } +}; + +function stringifyTag(doc, tag) { + if ((doc.version || doc.options.version) === '1.0') { + const priv = tag.match(/^tag:private\.yaml\.org,2002:([^:/]+)$/); + if (priv) return '!' + priv[1]; + const vocab = tag.match(/^tag:([a-zA-Z0-9-]+)\.yaml\.org,2002:(.*)/); + return vocab ? `!${vocab[1]}/${vocab[2]}` : `!${tag.replace(/^tag:/, '')}`; + } + + let p = doc.tagPrefixes.find(p => tag.indexOf(p.prefix) === 0); + + if (!p) { + const dtp = doc.getDefaults().tagPrefixes; + p = dtp && dtp.find(p => tag.indexOf(p.prefix) === 0); + } + + if (!p) return tag[0] === '!' ? tag : `!<${tag}>`; + const suffix = tag.substr(p.prefix.length).replace(/[!,[\]{}]/g, ch => ({ + '!': '%21', + ',': '%2C', + '[': '%5B', + ']': '%5D', + '{': '%7B', + '}': '%7D' + })[ch]); + return p.handle + suffix; +} + +function getTagObject(tags, item) { + if (item instanceof resolveSeq.Alias) return resolveSeq.Alias; + + if (item.tag) { + const match = tags.filter(t => t.tag === item.tag); + if (match.length > 0) return match.find(t => t.format === item.format) || match[0]; + } + + let tagObj, obj; + + if (item instanceof resolveSeq.Scalar) { + obj = item.value; // TODO: deprecate/remove class check + + const match = tags.filter(t => t.identify && t.identify(obj) || t.class && obj instanceof t.class); + tagObj = match.find(t => t.format === item.format) || match.find(t => !t.format); + } else { + obj = item; + tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); + } + + if (!tagObj) { + const name = obj && obj.constructor ? obj.constructor.name : typeof obj; + throw new Error(`Tag not resolved for ${name} value`); + } + + return tagObj; +} // needs to be called before value stringifier to allow for circular anchor refs + + +function stringifyProps(node, tagObj, { + anchors, + doc +}) { + const props = []; + const anchor = doc.anchors.getName(node); + + if (anchor) { + anchors[anchor] = node; + props.push(`&${anchor}`); + } + + if (node.tag) { + props.push(stringifyTag(doc, node.tag)); + } else if (!tagObj.default) { + props.push(stringifyTag(doc, tagObj.tag)); + } + + return props.join(' '); +} + +function stringify(item, ctx, onComment, onChompKeep) { + const { + anchors, + schema + } = ctx.doc; + let tagObj; + + if (!(item instanceof resolveSeq.Node)) { + const createCtx = { + aliasNodes: [], + onTagObj: o => tagObj = o, + prevObjects: new Map() + }; + item = schema.createNode(item, true, null, createCtx); + + for (const alias of createCtx.aliasNodes) { + alias.source = alias.source.node; + let name = anchors.getName(alias.source); + + if (!name) { + name = anchors.newName(); + anchors.map[name] = alias.source; + } + } + } + + if (item instanceof resolveSeq.Pair) return item.toString(ctx, onComment, onChompKeep); + if (!tagObj) tagObj = getTagObject(schema.tags, item); + const props = stringifyProps(item, tagObj, ctx); + if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1; + const str = typeof tagObj.stringify === 'function' ? tagObj.stringify(item, ctx, onComment, onChompKeep) : item instanceof resolveSeq.Scalar ? resolveSeq.stringifyString(item, ctx, onComment, onChompKeep) : item.toString(ctx, onComment, onChompKeep); + if (!props) return str; + return item instanceof resolveSeq.Scalar || str[0] === '{' || str[0] === '[' ? 
`${props} ${str}` : `${props}\n${ctx.indent}${str}`; +} + +class Anchors { + static validAnchorNode(node) { + return node instanceof resolveSeq.Scalar || node instanceof resolveSeq.YAMLSeq || node instanceof resolveSeq.YAMLMap; + } + + constructor(prefix) { + PlainValue._defineProperty(this, "map", Object.create(null)); + + this.prefix = prefix; + } + + createAlias(node, name) { + this.setAnchor(node, name); + return new resolveSeq.Alias(node); + } + + createMergePair(...sources) { + const merge = new resolveSeq.Merge(); + merge.value.items = sources.map(s => { + if (s instanceof resolveSeq.Alias) { + if (s.source instanceof resolveSeq.YAMLMap) return s; + } else if (s instanceof resolveSeq.YAMLMap) { + return this.createAlias(s); + } + + throw new Error('Merge sources must be Map nodes or their Aliases'); + }); + return merge; + } + + getName(node) { + const { + map + } = this; + return Object.keys(map).find(a => map[a] === node); + } + + getNames() { + return Object.keys(this.map); + } + + getNode(name) { + return this.map[name]; + } + + newName(prefix) { + if (!prefix) prefix = this.prefix; + const names = Object.keys(this.map); + + for (let i = 1; true; ++i) { + const name = `${prefix}${i}`; + if (!names.includes(name)) return name; + } + } // During parsing, map & aliases contain CST nodes + + + resolveNodes() { + const { + map, + _cstAliases + } = this; + Object.keys(map).forEach(a => { + map[a] = map[a].resolved; + }); + + _cstAliases.forEach(a => { + a.source = a.source.resolved; + }); + + delete this._cstAliases; + } + + setAnchor(node, name) { + if (node != null && !Anchors.validAnchorNode(node)) { + throw new Error('Anchors may only be set for Scalar, Seq and Map nodes'); + } + + if (name && /[\x00-\x19\s,[\]{}]/.test(name)) { + throw new Error('Anchor names must not contain whitespace or control characters'); + } + + const { + map + } = this; + const prev = node && Object.keys(map).find(a => map[a] === node); + + if (prev) { + if (!name) { + return prev; + } else if (prev !== name) { + delete map[prev]; + map[name] = node; + } + } else { + if (!name) { + if (!node) return null; + name = this.newName(); + } + + map[name] = node; + } + + return name; + } + +} + +const visit = (node, tags) => { + if (node && typeof node === 'object') { + const { + tag + } = node; + + if (node instanceof resolveSeq.Collection) { + if (tag) tags[tag] = true; + node.items.forEach(n => visit(n, tags)); + } else if (node instanceof resolveSeq.Pair) { + visit(node.key, tags); + visit(node.value, tags); + } else if (node instanceof resolveSeq.Scalar) { + if (tag) tags[tag] = true; + } + } + + return tags; +}; + +const listTagNames = node => Object.keys(visit(node, {})); + +function parseContents(doc, contents) { + const comments = { + before: [], + after: [] + }; + let body = undefined; + let spaceBefore = false; + + for (const node of contents) { + if (node.valueRange) { + if (body !== undefined) { + const msg = 'Document contains trailing content not separated by a ... or --- line'; + doc.errors.push(new PlainValue.YAMLSyntaxError(node, msg)); + break; + } + + const res = resolveSeq.resolveNode(doc, node); + + if (spaceBefore) { + res.spaceBefore = true; + spaceBefore = false; + } + + body = res; + } else if (node.comment !== null) { + const cc = body === undefined ? 
comments.before : comments.after; + cc.push(node.comment); + } else if (node.type === PlainValue.Type.BLANK_LINE) { + spaceBefore = true; + + if (body === undefined && comments.before.length > 0 && !doc.commentBefore) { + // space-separated comments at start are parsed as document comments + doc.commentBefore = comments.before.join('\n'); + comments.before = []; + } + } + } + + doc.contents = body || null; + + if (!body) { + doc.comment = comments.before.concat(comments.after).join('\n') || null; + } else { + const cb = comments.before.join('\n'); + + if (cb) { + const cbNode = body instanceof resolveSeq.Collection && body.items[0] ? body.items[0] : body; + cbNode.commentBefore = cbNode.commentBefore ? `${cb}\n${cbNode.commentBefore}` : cb; + } + + doc.comment = comments.after.join('\n') || null; + } +} + +function resolveTagDirective({ + tagPrefixes +}, directive) { + const [handle, prefix] = directive.parameters; + + if (!handle || !prefix) { + const msg = 'Insufficient parameters given for %TAG directive'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + if (tagPrefixes.some(p => p.handle === handle)) { + const msg = 'The %TAG directive must only be given at most once per handle in the same document.'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + return { + handle, + prefix + }; +} + +function resolveYamlDirective(doc, directive) { + let [version] = directive.parameters; + if (directive.name === 'YAML:1.0') version = '1.0'; + + if (!version) { + const msg = 'Insufficient parameters given for %YAML directive'; + throw new PlainValue.YAMLSemanticError(directive, msg); + } + + if (!documentOptions[version]) { + const v0 = doc.version || doc.options.version; + const msg = `Document will be parsed as YAML ${v0} rather than YAML ${version}`; + doc.warnings.push(new PlainValue.YAMLWarning(directive, msg)); + } + + return version; +} + +function parseDirectives(doc, directives, prevDoc) { + const directiveComments = []; + let hasDirectives = false; + + for (const directive of directives) { + const { + comment, + name + } = directive; + + switch (name) { + case 'TAG': + try { + doc.tagPrefixes.push(resolveTagDirective(doc, directive)); + } catch (error) { + doc.errors.push(error); + } + + hasDirectives = true; + break; + + case 'YAML': + case 'YAML:1.0': + if (doc.version) { + const msg = 'The %YAML directive must only be given at most once per document.'; + doc.errors.push(new PlainValue.YAMLSemanticError(directive, msg)); + } + + try { + doc.version = resolveYamlDirective(doc, directive); + } catch (error) { + doc.errors.push(error); + } + + hasDirectives = true; + break; + + default: + if (name) { + const msg = `YAML only supports %TAG and %YAML directives, and not %${name}`; + doc.warnings.push(new PlainValue.YAMLWarning(directive, msg)); + } + + } + + if (comment) directiveComments.push(comment); + } + + if (prevDoc && !hasDirectives && '1.1' === (doc.version || prevDoc.version || doc.options.version)) { + const copyTagPrefix = ({ + handle, + prefix + }) => ({ + handle, + prefix + }); + + doc.tagPrefixes = prevDoc.tagPrefixes.map(copyTagPrefix); + doc.version = prevDoc.version; + } + + doc.commentBefore = directiveComments.join('\n') || null; +} + +function assertCollection(contents) { + if (contents instanceof resolveSeq.Collection) return true; + throw new Error('Expected a YAML collection as document contents'); +} + +class Document { + constructor(options) { + this.anchors = new Anchors(options.anchorPrefix); + this.commentBefore = null; + 
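+    /* Rough usage sketch for this class; the yaml/parse-cst entry point is an
+       assumption here, only its browser shim appears elsewhere in this commit:
+
+         const parseCST = require('yaml/parse-cst')
+         const [cstDoc] = parseCST('a: 1\nseq: [2, 3]\n')
+         const doc = new Document(defaultOptions)
+         doc.parse(cstDoc)   // resolves directives, contents and anchors
+         doc.toJSON()        // => { a: 1, seq: [ 2, 3 ] }
+         String(doc)         // re-stringifies using the options above
+    */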
this.comment = null; + this.contents = null; + this.directivesEndMarker = null; + this.errors = []; + this.options = options; + this.schema = null; + this.tagPrefixes = []; + this.version = null; + this.warnings = []; + } + + add(value) { + assertCollection(this.contents); + return this.contents.add(value); + } + + addIn(path, value) { + assertCollection(this.contents); + this.contents.addIn(path, value); + } + + delete(key) { + assertCollection(this.contents); + return this.contents.delete(key); + } + + deleteIn(path) { + if (resolveSeq.isEmptyPath(path)) { + if (this.contents == null) return false; + this.contents = null; + return true; + } + + assertCollection(this.contents); + return this.contents.deleteIn(path); + } + + getDefaults() { + return Document.defaults[this.version] || Document.defaults[this.options.version] || {}; + } + + get(key, keepScalar) { + return this.contents instanceof resolveSeq.Collection ? this.contents.get(key, keepScalar) : undefined; + } + + getIn(path, keepScalar) { + if (resolveSeq.isEmptyPath(path)) return !keepScalar && this.contents instanceof resolveSeq.Scalar ? this.contents.value : this.contents; + return this.contents instanceof resolveSeq.Collection ? this.contents.getIn(path, keepScalar) : undefined; + } + + has(key) { + return this.contents instanceof resolveSeq.Collection ? this.contents.has(key) : false; + } + + hasIn(path) { + if (resolveSeq.isEmptyPath(path)) return this.contents !== undefined; + return this.contents instanceof resolveSeq.Collection ? this.contents.hasIn(path) : false; + } + + set(key, value) { + assertCollection(this.contents); + this.contents.set(key, value); + } + + setIn(path, value) { + if (resolveSeq.isEmptyPath(path)) this.contents = value;else { + assertCollection(this.contents); + this.contents.setIn(path, value); + } + } + + setSchema(id, customTags) { + if (!id && !customTags && this.schema) return; + if (typeof id === 'number') id = id.toFixed(1); + + if (id === '1.0' || id === '1.1' || id === '1.2') { + if (this.version) this.version = id;else this.options.version = id; + delete this.options.schema; + } else if (id && typeof id === 'string') { + this.options.schema = id; + } + + if (Array.isArray(customTags)) this.options.customTags = customTags; + const opt = Object.assign({}, this.getDefaults(), this.options); + this.schema = new Schema.Schema(opt); + } + + parse(node, prevDoc) { + if (this.options.keepCstNodes) this.cstNode = node; + if (this.options.keepNodeTypes) this.type = 'DOCUMENT'; + const { + directives = [], + contents = [], + directivesEndMarker, + error, + valueRange + } = node; + + if (error) { + if (!error.source) error.source = this; + this.errors.push(error); + } + + parseDirectives(this, directives, prevDoc); + if (directivesEndMarker) this.directivesEndMarker = true; + this.range = valueRange ? [valueRange.start, valueRange.end] : null; + this.setSchema(); + this.anchors._cstAliases = []; + parseContents(this, contents); + this.anchors.resolveNodes(); + + if (this.options.prettyErrors) { + for (const error of this.errors) if (error instanceof PlainValue.YAMLError) error.makePretty(); + + for (const warn of this.warnings) if (warn instanceof PlainValue.YAMLError) warn.makePretty(); + } + + return this; + } + + listNonDefaultTags() { + return listTagNames(this.contents).filter(t => t.indexOf(Schema.Schema.defaultPrefix) !== 0); + } + + setTagPrefix(handle, prefix) { + if (handle[0] !== '!' 
|| handle[handle.length - 1] !== '!') throw new Error('Handle must start and end with !'); + + if (prefix) { + const prev = this.tagPrefixes.find(p => p.handle === handle); + if (prev) prev.prefix = prefix;else this.tagPrefixes.push({ + handle, + prefix + }); + } else { + this.tagPrefixes = this.tagPrefixes.filter(p => p.handle !== handle); + } + } + + toJSON(arg, onAnchor) { + const { + keepBlobsInJSON, + mapAsMap, + maxAliasCount + } = this.options; + const keep = keepBlobsInJSON && (typeof arg !== 'string' || !(this.contents instanceof resolveSeq.Scalar)); + const ctx = { + doc: this, + indentStep: ' ', + keep, + mapAsMap: keep && !!mapAsMap, + maxAliasCount, + stringify // Requiring directly in Pair would create circular dependencies + + }; + const anchorNames = Object.keys(this.anchors.map); + if (anchorNames.length > 0) ctx.anchors = new Map(anchorNames.map(name => [this.anchors.map[name], { + alias: [], + aliasCount: 0, + count: 1 + }])); + const res = resolveSeq.toJSON(this.contents, arg, ctx); + if (typeof onAnchor === 'function' && ctx.anchors) for (const { + count, + res + } of ctx.anchors.values()) onAnchor(res, count); + return res; + } + + toString() { + if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified'); + const indentSize = this.options.indent; + + if (!Number.isInteger(indentSize) || indentSize <= 0) { + const s = JSON.stringify(indentSize); + throw new Error(`"indent" option must be a positive integer, not ${s}`); + } + + this.setSchema(); + const lines = []; + let hasDirectives = false; + + if (this.version) { + let vd = '%YAML 1.2'; + + if (this.schema.name === 'yaml-1.1') { + if (this.version === '1.0') vd = '%YAML:1.0';else if (this.version === '1.1') vd = '%YAML 1.1'; + } + + lines.push(vd); + hasDirectives = true; + } + + const tagNames = this.listNonDefaultTags(); + this.tagPrefixes.forEach(({ + handle, + prefix + }) => { + if (tagNames.some(t => t.indexOf(prefix) === 0)) { + lines.push(`%TAG ${handle} ${prefix}`); + hasDirectives = true; + } + }); + if (hasDirectives || this.directivesEndMarker) lines.push('---'); + + if (this.commentBefore) { + if (hasDirectives || !this.directivesEndMarker) lines.unshift(''); + lines.unshift(this.commentBefore.replace(/^/gm, '#')); + } + + const ctx = { + anchors: Object.create(null), + doc: this, + indent: '', + indentStep: ' '.repeat(indentSize), + stringify // Requiring directly in nodes would create circular dependencies + + }; + let chompKeep = false; + let contentComment = null; + + if (this.contents) { + if (this.contents instanceof resolveSeq.Node) { + if (this.contents.spaceBefore && (hasDirectives || this.directivesEndMarker)) lines.push(''); + if (this.contents.commentBefore) lines.push(this.contents.commentBefore.replace(/^/gm, '#')); // top-level block scalars need to be indented if followed by a comment + + ctx.forceBlockIndent = !!this.comment; + contentComment = this.contents.comment; + } + + const onChompKeep = contentComment ? 
null : () => chompKeep = true; + const body = stringify(this.contents, ctx, () => contentComment = null, onChompKeep); + lines.push(resolveSeq.addComment(body, '', contentComment)); + } else if (this.contents !== undefined) { + lines.push(stringify(this.contents, ctx)); + } + + if (this.comment) { + if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push(''); + lines.push(this.comment.replace(/^/gm, '#')); + } + + return lines.join('\n') + '\n'; + } + +} + +PlainValue._defineProperty(Document, "defaults", documentOptions); + +exports.Document = Document; +exports.defaultOptions = defaultOptions; +exports.scalarOptions = scalarOptions; diff --git a/node_modules/yaml/dist/PlainValue-ec8e588e.js b/node_modules/yaml/dist/PlainValue-ec8e588e.js new file mode 100644 index 0000000..db8a14e --- /dev/null +++ b/node_modules/yaml/dist/PlainValue-ec8e588e.js @@ -0,0 +1,876 @@ +'use strict'; + +const Char = { + ANCHOR: '&', + COMMENT: '#', + TAG: '!', + DIRECTIVES_END: '-', + DOCUMENT_END: '.' +}; +const Type = { + ALIAS: 'ALIAS', + BLANK_LINE: 'BLANK_LINE', + BLOCK_FOLDED: 'BLOCK_FOLDED', + BLOCK_LITERAL: 'BLOCK_LITERAL', + COMMENT: 'COMMENT', + DIRECTIVE: 'DIRECTIVE', + DOCUMENT: 'DOCUMENT', + FLOW_MAP: 'FLOW_MAP', + FLOW_SEQ: 'FLOW_SEQ', + MAP: 'MAP', + MAP_KEY: 'MAP_KEY', + MAP_VALUE: 'MAP_VALUE', + PLAIN: 'PLAIN', + QUOTE_DOUBLE: 'QUOTE_DOUBLE', + QUOTE_SINGLE: 'QUOTE_SINGLE', + SEQ: 'SEQ', + SEQ_ITEM: 'SEQ_ITEM' +}; +const defaultTagPrefix = 'tag:yaml.org,2002:'; +const defaultTags = { + MAP: 'tag:yaml.org,2002:map', + SEQ: 'tag:yaml.org,2002:seq', + STR: 'tag:yaml.org,2002:str' +}; + +function findLineStarts(src) { + const ls = [0]; + let offset = src.indexOf('\n'); + + while (offset !== -1) { + offset += 1; + ls.push(offset); + offset = src.indexOf('\n', offset); + } + + return ls; +} + +function getSrcInfo(cst) { + let lineStarts, src; + + if (typeof cst === 'string') { + lineStarts = findLineStarts(cst); + src = cst; + } else { + if (Array.isArray(cst)) cst = cst[0]; + + if (cst && cst.context) { + if (!cst.lineStarts) cst.lineStarts = findLineStarts(cst.context.src); + lineStarts = cst.lineStarts; + src = cst.context.src; + } + } + + return { + lineStarts, + src + }; +} +/** + * @typedef {Object} LinePos - One-indexed position in the source + * @property {number} line + * @property {number} col + */ + +/** + * Determine the line/col position matching a character offset. + * + * Accepts a source string or a CST document as the second parameter. With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns a one-indexed `{ line, col }` location if found, or + * `undefined` otherwise. + * + * @param {number} offset + * @param {string|Document|Document[]} cst + * @returns {?LinePos} + */ + + +function getLinePos(offset, cst) { + if (typeof offset !== 'number' || offset < 0) return null; + const { + lineStarts, + src + } = getSrcInfo(cst); + if (!lineStarts || !src || offset > src.length) return null; + + for (let i = 0; i < lineStarts.length; ++i) { + const start = lineStarts[i]; + + if (offset < start) { + return { + line: i, + col: offset - lineStarts[i - 1] + 1 + }; + } + + if (offset === start) return { + line: i + 1, + col: 1 + }; + } + + const line = lineStarts.length; + return { + line, + col: offset - lineStarts[line - 1] + 1 + }; +} +/** + * Get a specified line from the source. + * + * Accepts a source string or a CST document as the second parameter. 
With + * the latter, starting indices for lines are cached in the document as + * `lineStarts: number[]`. + * + * Returns the line as a string if found, or `null` otherwise. + * + * @param {number} line One-indexed line number + * @param {string|Document|Document[]} cst + * @returns {?string} + */ + +function getLine(line, cst) { + const { + lineStarts, + src + } = getSrcInfo(cst); + if (!lineStarts || !(line >= 1) || line > lineStarts.length) return null; + const start = lineStarts[line - 1]; + let end = lineStarts[line]; // undefined for last line; that's ok for slice() + + while (end && end > start && src[end - 1] === '\n') --end; + + return src.slice(start, end); +} +/** + * Pretty-print the starting line from the source indicated by the range `pos` + * + * Trims output to `maxWidth` chars while keeping the starting column visible, + * using `…` at either end to indicate dropped characters. + * + * Returns a two-line string (or `null`) with `\n` as separator; the second line + * will hold appropriately indented `^` marks indicating the column range. + * + * @param {Object} pos + * @param {LinePos} pos.start + * @param {LinePos} [pos.end] + * @param {string|Document|Document[]*} cst + * @param {number} [maxWidth=80] + * @returns {?string} + */ + +function getPrettyContext({ + start, + end +}, cst, maxWidth = 80) { + let src = getLine(start.line, cst); + if (!src) return null; + let { + col + } = start; + + if (src.length > maxWidth) { + if (col <= maxWidth - 10) { + src = src.substr(0, maxWidth - 1) + '…'; + } else { + const halfWidth = Math.round(maxWidth / 2); + if (src.length > col + halfWidth) src = src.substr(0, col + halfWidth - 1) + '…'; + col -= src.length - maxWidth; + src = '…' + src.substr(1 - maxWidth); + } + } + + let errLen = 1; + let errEnd = ''; + + if (end) { + if (end.line === start.line && col + (end.col - start.col) <= maxWidth + 1) { + errLen = end.col - start.col; + } else { + errLen = Math.min(src.length + 1, maxWidth) - col; + errEnd = '…'; + } + } + + const offset = col > 1 ? ' '.repeat(col - 1) : ''; + const err = '^'.repeat(errLen); + return `${src}\n${offset}${err}${errEnd}`; +} + +class Range { + static copy(orig) { + return new Range(orig.start, orig.end); + } + + constructor(start, end) { + this.start = start; + this.end = end || start; + } + + isEmpty() { + return typeof this.start !== 'number' || !this.end || this.end <= this.start; + } + /** + * Set `origStart` and `origEnd` to point to the original source range for + * this node, which may differ due to dropped CR characters. + * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + + setOrigRange(cr, offset) { + const { + start, + end + } = this; + + if (cr.length === 0 || end <= cr[0]) { + this.origStart = start; + this.origEnd = end; + return offset; + } + + let i = offset; + + while (i < cr.length) { + if (cr[i] > start) break;else ++i; + } + + this.origStart = start + i; + const nextOffset = i; + + while (i < cr.length) { + // if end was at \n, it should now be at \r + if (cr[i] >= end) break;else ++i; + } + + this.origEnd = end + i; + return nextOffset; + } + +} + +/** Root class of all nodes */ + +class Node { + static addStringTerminator(src, offset, str) { + if (str[str.length - 1] === '\n') return str; + const next = Node.endOfWhiteSpace(src, offset); + return next >= src.length || src[next] === '\n' ? 
str + '\n' : str; + } // ^(---|...) + + + static atDocumentBoundary(src, offset, sep) { + const ch0 = src[offset]; + if (!ch0) return true; + const prev = src[offset - 1]; + if (prev && prev !== '\n') return false; + + if (sep) { + if (ch0 !== sep) return false; + } else { + if (ch0 !== Char.DIRECTIVES_END && ch0 !== Char.DOCUMENT_END) return false; + } + + const ch1 = src[offset + 1]; + const ch2 = src[offset + 2]; + if (ch1 !== ch0 || ch2 !== ch0) return false; + const ch3 = src[offset + 3]; + return !ch3 || ch3 === '\n' || ch3 === '\t' || ch3 === ' '; + } + + static endOfIdentifier(src, offset) { + let ch = src[offset]; + const isVerbatim = ch === '<'; + const notOk = isVerbatim ? ['\n', '\t', ' ', '>'] : ['\n', '\t', ' ', '[', ']', '{', '}', ',']; + + while (ch && notOk.indexOf(ch) === -1) ch = src[offset += 1]; + + if (isVerbatim && ch === '>') offset += 1; + return offset; + } + + static endOfIndent(src, offset) { + let ch = src[offset]; + + while (ch === ' ') ch = src[offset += 1]; + + return offset; + } + + static endOfLine(src, offset) { + let ch = src[offset]; + + while (ch && ch !== '\n') ch = src[offset += 1]; + + return offset; + } + + static endOfWhiteSpace(src, offset) { + let ch = src[offset]; + + while (ch === '\t' || ch === ' ') ch = src[offset += 1]; + + return offset; + } + + static startOfLine(src, offset) { + let ch = src[offset - 1]; + if (ch === '\n') return offset; + + while (ch && ch !== '\n') ch = src[offset -= 1]; + + return offset + 1; + } + /** + * End of indentation, or null if the line's indent level is not more + * than `indent` + * + * @param {string} src + * @param {number} indent + * @param {number} lineStart + * @returns {?number} + */ + + + static endOfBlockIndent(src, indent, lineStart) { + const inEnd = Node.endOfIndent(src, lineStart); + + if (inEnd > lineStart + indent) { + return inEnd; + } else { + const wsEnd = Node.endOfWhiteSpace(src, inEnd); + const ch = src[wsEnd]; + if (!ch || ch === '\n') return wsEnd; + } + + return null; + } + + static atBlank(src, offset, endAsBlank) { + const ch = src[offset]; + return ch === '\n' || ch === '\t' || ch === ' ' || endAsBlank && !ch; + } + + static nextNodeIsIndented(ch, indentDiff, indicatorAsIndent) { + if (!ch || indentDiff < 0) return false; + if (indentDiff > 0) return true; + return indicatorAsIndent && ch === '-'; + } // should be at line or string end, or at next non-whitespace char + + + static normalizeOffset(src, offset) { + const ch = src[offset]; + return !ch ? offset : ch !== '\n' && src[offset - 1] === '\n' ? 
offset - 1 : Node.endOfWhiteSpace(src, offset); + } // fold single newline into space, multiple newlines to N - 1 newlines + // presumes src[offset] === '\n' + + + static foldNewline(src, offset, indent) { + let inCount = 0; + let error = false; + let fold = ''; + let ch = src[offset + 1]; + + while (ch === ' ' || ch === '\t' || ch === '\n') { + switch (ch) { + case '\n': + inCount = 0; + offset += 1; + fold += '\n'; + break; + + case '\t': + if (inCount <= indent) error = true; + offset = Node.endOfWhiteSpace(src, offset + 2) - 1; + break; + + case ' ': + inCount += 1; + offset += 1; + break; + } + + ch = src[offset + 1]; + } + + if (!fold) fold = ' '; + if (ch && inCount <= indent) error = true; + return { + fold, + offset, + error + }; + } + + constructor(type, props, context) { + Object.defineProperty(this, 'context', { + value: context || null, + writable: true + }); + this.error = null; + this.range = null; + this.valueRange = null; + this.props = props || []; + this.type = type; + this.value = null; + } + + getPropValue(idx, key, skipKey) { + if (!this.context) return null; + const { + src + } = this.context; + const prop = this.props[idx]; + return prop && src[prop.start] === key ? src.slice(prop.start + (skipKey ? 1 : 0), prop.end) : null; + } + + get anchor() { + for (let i = 0; i < this.props.length; ++i) { + const anchor = this.getPropValue(i, Char.ANCHOR, true); + if (anchor != null) return anchor; + } + + return null; + } + + get comment() { + const comments = []; + + for (let i = 0; i < this.props.length; ++i) { + const comment = this.getPropValue(i, Char.COMMENT, true); + if (comment != null) comments.push(comment); + } + + return comments.length > 0 ? comments.join('\n') : null; + } + + commentHasRequiredWhitespace(start) { + const { + src + } = this.context; + if (this.header && start === this.header.end) return false; + if (!this.valueRange) return false; + const { + end + } = this.valueRange; + return start !== end || Node.atBlank(src, end - 1); + } + + get hasComment() { + if (this.context) { + const { + src + } = this.context; + + for (let i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] === Char.COMMENT) return true; + } + } + + return false; + } + + get hasProps() { + if (this.context) { + const { + src + } = this.context; + + for (let i = 0; i < this.props.length; ++i) { + if (src[this.props[i].start] !== Char.COMMENT) return true; + } + } + + return false; + } + + get includesTrailingLines() { + return false; + } + + get jsonLike() { + const jsonLikeTypes = [Type.FLOW_MAP, Type.FLOW_SEQ, Type.QUOTE_DOUBLE, Type.QUOTE_SINGLE]; + return jsonLikeTypes.indexOf(this.type) !== -1; + } + + get rangeAsLinePos() { + if (!this.range || !this.context) return undefined; + const start = getLinePos(this.range.start, this.context.root); + if (!start) return undefined; + const end = getLinePos(this.range.end, this.context.root); + return { + start, + end + }; + } + + get rawValue() { + if (!this.valueRange || !this.context) return null; + const { + start, + end + } = this.valueRange; + return this.context.src.slice(start, end); + } + + get tag() { + for (let i = 0; i < this.props.length; ++i) { + const tag = this.getPropValue(i, Char.TAG, false); + + if (tag != null) { + if (tag[1] === '<') { + return { + verbatim: tag.slice(2, -1) + }; + } else { + // eslint-disable-next-line no-unused-vars + const [_, handle, suffix] = tag.match(/^(.*!)([^!]*)$/); + return { + handle, + suffix + }; + } + } + } + + return null; + } + + get valueRangeContainsNewline() { + if 
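foldNewline() above implements standard YAML line folding for plain scalars: a single newline becomes a space, N consecutive newlines become N - 1. Seen through the package's public entry point (assuming this dist build is what require('yaml') resolves to):

const YAML = require('yaml');
YAML.parse('msg: first\n  second\n');
// → { msg: 'first second' }   a single newline folds to a space
YAML.parse('msg: first\n\n  second\n');
// → { msg: 'first\nsecond' }  N newlines fold to N - 1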
(!this.valueRange || !this.context) return false; + const { + start, + end + } = this.valueRange; + const { + src + } = this.context; + + for (let i = start; i < end; ++i) { + if (src[i] === '\n') return true; + } + + return false; + } + + parseComment(start) { + const { + src + } = this.context; + + if (src[start] === Char.COMMENT) { + const end = Node.endOfLine(src, start + 1); + const commentRange = new Range(start, end); + this.props.push(commentRange); + return end; + } + + return start; + } + /** + * Populates the `origStart` and `origEnd` values of all ranges for this + * node. Extended by child classes to handle descendant nodes. + * + * @param {number[]} cr - Positions of dropped CR characters + * @param {number} offset - Starting index of `cr` from the last call + * @returns {number} - The next offset, matching the one found for `origStart` + */ + + + setOrigRanges(cr, offset) { + if (this.range) offset = this.range.setOrigRange(cr, offset); + if (this.valueRange) this.valueRange.setOrigRange(cr, offset); + this.props.forEach(prop => prop.setOrigRange(cr, offset)); + return offset; + } + + toString() { + const { + context: { + src + }, + range, + value + } = this; + if (value != null) return value; + const str = src.slice(range.start, range.end); + return Node.addStringTerminator(src, range.end, str); + } + +} + +class YAMLError extends Error { + constructor(name, source, message) { + if (!message || !(source instanceof Node)) throw new Error(`Invalid arguments for new ${name}`); + super(); + this.name = name; + this.message = message; + this.source = source; + } + + makePretty() { + if (!this.source) return; + this.nodeType = this.source.type; + const cst = this.source.context && this.source.context.root; + + if (typeof this.offset === 'number') { + this.range = new Range(this.offset, this.offset + 1); + const start = cst && getLinePos(this.offset, cst); + + if (start) { + const end = { + line: start.line, + col: start.col + 1 + }; + this.linePos = { + start, + end + }; + } + + delete this.offset; + } else { + this.range = this.source.range; + this.linePos = this.source.rangeAsLinePos; + } + + if (this.linePos) { + const { + line, + col + } = this.linePos.start; + this.message += ` at line ${line}, column ${col}`; + const ctx = cst && getPrettyContext(this.linePos, cst); + if (ctx) this.message += `:\n\n${ctx}\n`; + } + + delete this.source; + } + +} +class YAMLReferenceError extends YAMLError { + constructor(source, message) { + super('YAMLReferenceError', source, message); + } + +} +class YAMLSemanticError extends YAMLError { + constructor(source, message) { + super('YAMLSemanticError', source, message); + } + +} +class YAMLSyntaxError extends YAMLError { + constructor(source, message) { + super('YAMLSyntaxError', source, message); + } + +} +class YAMLWarning extends YAMLError { + constructor(source, message) { + super('YAMLWarning', source, message); + } + +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +class PlainValue extends Node { + static endOfLine(src, start, inFlow) { + let ch = src[start]; + let offset = start; + + while (ch && ch !== '\n') { + if (inFlow && (ch === '[' || ch === ']' || ch === '{' || ch === '}' || ch === ',')) break; + const next = src[offset + 1]; + if (ch === ':' && (!next || next === '\n' || next === '\t' || next === ' ' || inFlow && next === ',')) break; 
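The YAMLError hierarchy above is what parsing surfaces to callers; makePretty() is what turns a raw offset into the "at line L, column C" suffix plus the caret excerpt built by getPrettyContext(). A usage sketch, assuming the package's documented prettyErrors option (not shown in this chunk) is what triggers makePretty():

const YAML = require('yaml');
try {
  YAML.parse('a: [1, 2\n', { prettyErrors: true });  // unclosed flow sequence
} catch (err) {
  // err is one of the subclasses above (YAMLSyntaxError / YAMLSemanticError);
  // with prettyErrors the message ends with ' at line L, column C' and a
  // two-line excerpt in which '^' characters underline the offending range.
  console.log(err.name, err.message);
}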
+ if ((ch === ' ' || ch === '\t') && next === '#') break; + offset += 1; + ch = next; + } + + return offset; + } + + get strValue() { + if (!this.valueRange || !this.context) return null; + let { + start, + end + } = this.valueRange; + const { + src + } = this.context; + let ch = src[end - 1]; + + while (start < end && (ch === '\n' || ch === '\t' || ch === ' ')) ch = src[--end - 1]; + + let str = ''; + + for (let i = start; i < end; ++i) { + const ch = src[i]; + + if (ch === '\n') { + const { + fold, + offset + } = Node.foldNewline(src, i, -1); + str += fold; + i = offset; + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (i < end && (next === ' ' || next === '\t')) { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + const ch0 = src[start]; + + switch (ch0) { + case '\t': + { + const msg = 'Plain value cannot start with a tab character'; + const errors = [new YAMLSemanticError(this, msg)]; + return { + errors, + str + }; + } + + case '@': + case '`': + { + const msg = `Plain value cannot start with reserved character ${ch0}`; + const errors = [new YAMLSemanticError(this, msg)]; + return { + errors, + str + }; + } + + default: + return str; + } + } + + parseBlockValue(start) { + const { + indent, + inFlow, + src + } = this.context; + let offset = start; + let valueEnd = start; + + for (let ch = src[offset]; ch === '\n'; ch = src[offset]) { + if (Node.atDocumentBoundary(src, offset + 1)) break; + const end = Node.endOfBlockIndent(src, indent, offset + 1); + if (end === null || src[end] === '#') break; + + if (src[end] === '\n') { + offset = end; + } else { + valueEnd = PlainValue.endOfLine(src, end, inFlow); + offset = valueEnd; + } + } + + if (this.valueRange.isEmpty()) this.valueRange.start = start; + this.valueRange.end = valueEnd; + return valueEnd; + } + /** + * Parses a plain value from the source + * + * Accepted forms are: + * ``` + * #comment + * + * first line + * + * first line #comment + * + * first line + * block + * lines + * + * #comment + * block + * lines + * ``` + * where block lines are empty or have an indent level greater than `indent`. 
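The strValue() checks above are where the YAML restrictions on plain scalars are enforced: a plain value may not start with a tab, '@' or '`'. A small sketch of the expected behaviour through the public API (the error should surface as a thrown YAMLSemanticError, since parse() rethrows the first document error):

const YAML = require('yaml');
YAML.parse('note: plain text is fine\n');  // → { note: 'plain text is fine' }
YAML.parse('@handle\n');                   // throws: plain values cannot start with
                                           // the reserved characters '@' or '`'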
+ * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar, may be `\n` + */ + + + parse(context, start) { + this.context = context; + const { + inFlow, + src + } = context; + let offset = start; + const ch = src[offset]; + + if (ch && ch !== '#' && ch !== '\n') { + offset = PlainValue.endOfLine(src, start, inFlow); + } + + this.valueRange = new Range(start, offset); + offset = Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + + if (!this.hasComment || this.valueRange.isEmpty()) { + offset = this.parseBlockValue(offset); + } + + return offset; + } + +} + +exports.Char = Char; +exports.Node = Node; +exports.PlainValue = PlainValue; +exports.Range = Range; +exports.Type = Type; +exports.YAMLError = YAMLError; +exports.YAMLReferenceError = YAMLReferenceError; +exports.YAMLSemanticError = YAMLSemanticError; +exports.YAMLSyntaxError = YAMLSyntaxError; +exports.YAMLWarning = YAMLWarning; +exports._defineProperty = _defineProperty; +exports.defaultTagPrefix = defaultTagPrefix; +exports.defaultTags = defaultTags; diff --git a/node_modules/yaml/dist/Schema-88e323a7.js b/node_modules/yaml/dist/Schema-88e323a7.js new file mode 100644 index 0000000..5de109f --- /dev/null +++ b/node_modules/yaml/dist/Schema-88e323a7.js @@ -0,0 +1,525 @@ +'use strict'; + +var PlainValue = require('./PlainValue-ec8e588e.js'); +var resolveSeq = require('./resolveSeq-d03cb037.js'); +var warnings = require('./warnings-1000a372.js'); + +function createMap(schema, obj, ctx) { + const map = new resolveSeq.YAMLMap(schema); + + if (obj instanceof Map) { + for (const [key, value] of obj) map.items.push(schema.createPair(key, value, ctx)); + } else if (obj && typeof obj === 'object') { + for (const key of Object.keys(obj)) map.items.push(schema.createPair(key, obj[key], ctx)); + } + + if (typeof schema.sortMapEntries === 'function') { + map.items.sort(schema.sortMapEntries); + } + + return map; +} + +const map = { + createNode: createMap, + default: true, + nodeClass: resolveSeq.YAMLMap, + tag: 'tag:yaml.org,2002:map', + resolve: resolveSeq.resolveMap +}; + +function createSeq(schema, obj, ctx) { + const seq = new resolveSeq.YAMLSeq(schema); + + if (obj && obj[Symbol.iterator]) { + for (const it of obj) { + const v = schema.createNode(it, ctx.wrapScalars, null, ctx); + seq.items.push(v); + } + } + + return seq; +} + +const seq = { + createNode: createSeq, + default: true, + nodeClass: resolveSeq.YAMLSeq, + tag: 'tag:yaml.org,2002:seq', + resolve: resolveSeq.resolveSeq +}; + +const string = { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveSeq.resolveString, + + stringify(item, ctx, onComment, onChompKeep) { + ctx = Object.assign({ + actualString: true + }, ctx); + return resolveSeq.stringifyString(item, ctx, onComment, onChompKeep); + }, + + options: resolveSeq.strOptions +}; + +const failsafe = [map, seq, string]; + +/* global BigInt */ + +const intIdentify$2 = value => typeof value === 'bigint' || Number.isInteger(value); + +const intResolve$1 = (src, part, radix) => resolveSeq.intOptions.asBigInt ? 
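The failsafe schema assembled above contains only the map, seq and string tags, so every scalar stays a string. A sketch assuming the package's documented schema option selects one of the schema names defined in this file:

const YAML = require('yaml');
YAML.parse('port: 8080\n');                          // → { port: 8080 }   default 'core' schema
YAML.parse('port: 8080\n', { schema: 'failsafe' });  // → { port: '8080' } failsafe: map, seq, string only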
BigInt(src) : parseInt(part, radix); + +function intStringify$1(node, radix, prefix) { + const { + value + } = node; + if (intIdentify$2(value) && value >= 0) return prefix + value.toString(radix); + return resolveSeq.stringifyNumber(node); +} + +const nullObj = { + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => null, + options: resolveSeq.nullOptions, + stringify: () => resolveSeq.nullOptions.nullStr +}; +const boolObj = { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, + resolve: str => str[0] === 't' || str[0] === 'T', + options: resolveSeq.boolOptions, + stringify: ({ + value + }) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr +}; +const octObj = { + identify: value => intIdentify$2(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^0o([0-7]+)$/, + resolve: (str, oct) => intResolve$1(str, oct, 8), + options: resolveSeq.intOptions, + stringify: node => intStringify$1(node, 8, '0o') +}; +const intObj = { + identify: intIdentify$2, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9]+$/, + resolve: str => intResolve$1(str, str, 10), + options: resolveSeq.intOptions, + stringify: resolveSeq.stringifyNumber +}; +const hexObj = { + identify: value => intIdentify$2(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^0x([0-9a-fA-F]+)$/, + resolve: (str, hex) => intResolve$1(str, hex, 16), + options: resolveSeq.intOptions, + stringify: node => intStringify$1(node, 16, '0x') +}; +const nanObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: (str, nan) => nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, + stringify: resolveSeq.stringifyNumber +}; +const expObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str), + stringify: ({ + value + }) => Number(value).toExponential() +}; +const floatObj = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:\.([0-9]+)|[0-9]+\.([0-9]*))$/, + + resolve(str, frac1, frac2) { + const frac = frac1 || frac2; + const node = new resolveSeq.Scalar(parseFloat(str)); + if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length; + return node; + }, + + stringify: resolveSeq.stringifyNumber +}; +const core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]); + +/* global BigInt */ + +const intIdentify$1 = value => typeof value === 'bigint' || Number.isInteger(value); + +const stringifyJSON = ({ + value +}) => JSON.stringify(value); + +const json = [map, seq, { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: resolveSeq.resolveString, + stringify: stringifyJSON +}, { + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? 
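The core-schema tags above decide how plain scalars resolve under the default YAML 1.2 settings; the test regexps shown map directly to the results below:

const YAML = require('yaml');
YAML.parse('0x1F');    // → 31       HEX int tag
YAML.parse('0o17');    // → 15       OCT int tag
YAML.parse('6.8e-2');  // → 0.068    EXP float tag
YAML.parse('.inf');    // → Infinity
YAML.parse('.nan');    // → NaN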
new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^null$/, + resolve: () => null, + stringify: stringifyJSON +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^true|false$/, + resolve: str => str === 'true', + stringify: stringifyJSON +}, { + identify: intIdentify$1, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^-?(?:0|[1-9][0-9]*)$/, + resolve: str => resolveSeq.intOptions.asBigInt ? BigInt(str) : parseInt(str, 10), + stringify: ({ + value + }) => intIdentify$1(value) ? value.toString() : JSON.stringify(value) +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, + resolve: str => parseFloat(str), + stringify: stringifyJSON +}]; + +json.scalarFallback = str => { + throw new SyntaxError(`Unresolved plain scalar ${JSON.stringify(str)}`); +}; + +/* global BigInt */ + +const boolStringify = ({ + value +}) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr; + +const intIdentify = value => typeof value === 'bigint' || Number.isInteger(value); + +function intResolve(sign, src, radix) { + let str = src.replace(/_/g, ''); + + if (resolveSeq.intOptions.asBigInt) { + switch (radix) { + case 2: + str = `0b${str}`; + break; + + case 8: + str = `0o${str}`; + break; + + case 16: + str = `0x${str}`; + break; + } + + const n = BigInt(str); + return sign === '-' ? BigInt(-1) * n : n; + } + + const n = parseInt(str, radix); + return sign === '-' ? -1 * n : n; +} + +function intStringify(node, radix, prefix) { + const { + value + } = node; + + if (intIdentify(value)) { + const str = value.toString(radix); + return value < 0 ? '-' + prefix + str.substr(1) : prefix + str; + } + + return resolveSeq.stringifyNumber(node); +} + +const yaml11 = failsafe.concat([{ + identify: value => value == null, + createNode: (schema, value, ctx) => ctx.wrapScalars ? 
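The json schema above is deliberately strict: only null, true/false, numbers and quoted strings resolve, and json.scalarFallback throws for anything else. A sketch assuming the schema option accepts 'json':

const YAML = require('yaml');
YAML.parse('enabled: true\n', { schema: 'json' });  // → { enabled: true }
YAML.parse('enabled: yes\n', { schema: 'json' });   // throws: the scalarFallback above
                                                    // rejects unquoted plain strings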
new resolveSeq.Scalar(null) : null, + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => null, + options: resolveSeq.nullOptions, + stringify: () => resolveSeq.nullOptions.nullStr +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, + resolve: () => true, + options: resolveSeq.boolOptions, + stringify: boolStringify +}, { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i, + resolve: () => false, + options: resolveSeq.boolOptions, + stringify: boolStringify +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'BIN', + test: /^([-+]?)0b([0-1_]+)$/, + resolve: (str, sign, bin) => intResolve(sign, bin, 2), + stringify: node => intStringify(node, 2, '0b') +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^([-+]?)0([0-7_]+)$/, + resolve: (str, sign, oct) => intResolve(sign, oct, 8), + stringify: node => intStringify(node, 8, '0') +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^([-+]?)([0-9][0-9_]*)$/, + resolve: (str, sign, abs) => intResolve(sign, abs, 10), + stringify: resolveSeq.stringifyNumber +}, { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^([-+]?)0x([0-9a-fA-F_]+)$/, + resolve: (str, sign, hex) => intResolve(sign, hex, 16), + stringify: node => intStringify(node, 16, '0x') +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.inf|(\.nan))$/i, + resolve: (str, nan) => nan ? NaN : str[0] === '-' ? 
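The yaml-1.1 tags above accept the older scalar forms: yes/no/on/off booleans, 0b binary, and underscores inside numbers (stripped by intResolve). A sketch assuming the documented version option switches the default schema to yaml-1.1:

const YAML = require('yaml');
YAML.parse('on');                          // → 'on'  plain string under the 1.2 core schema
YAML.parse('on', { version: '1.1' });      // → true  1.1 bool tag above
YAML.parse('0b1010', { version: '1.1' });  // → 10    BIN tag
YAML.parse('1_000', { version: '1.1' });   // → 1000  underscores removed by intResolve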
Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY, + stringify: resolveSeq.stringifyNumber +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?([0-9][0-9_]*)?(\.[0-9_]*)?[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str.replace(/_/g, '')), + stringify: ({ + value + }) => Number(value).toExponential() +}, { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:[0-9][0-9_]*)?\.([0-9_]*)$/, + + resolve(str, frac) { + const node = new resolveSeq.Scalar(parseFloat(str.replace(/_/g, ''))); + + if (frac) { + const f = frac.replace(/_/g, ''); + if (f[f.length - 1] === '0') node.minFractionDigits = f.length; + } + + return node; + }, + + stringify: resolveSeq.stringifyNumber +}], warnings.binary, warnings.omap, warnings.pairs, warnings.set, warnings.intTime, warnings.floatTime, warnings.timestamp); + +const schemas = { + core, + failsafe, + json, + yaml11 +}; +const tags = { + binary: warnings.binary, + bool: boolObj, + float: floatObj, + floatExp: expObj, + floatNaN: nanObj, + floatTime: warnings.floatTime, + int: intObj, + intHex: hexObj, + intOct: octObj, + intTime: warnings.intTime, + map, + null: nullObj, + omap: warnings.omap, + pairs: warnings.pairs, + seq, + set: warnings.set, + timestamp: warnings.timestamp +}; + +function findTagObject(value, tagName, tags) { + if (tagName) { + const match = tags.filter(t => t.tag === tagName); + const tagObj = match.find(t => !t.format) || match[0]; + if (!tagObj) throw new Error(`Tag ${tagName} not found`); + return tagObj; + } // TODO: deprecate/remove class check + + + return tags.find(t => (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format); +} + +function createNode(value, tagName, ctx) { + if (value instanceof resolveSeq.Node) return value; + const { + defaultPrefix, + onTagObj, + prevObjects, + schema, + wrapScalars + } = ctx; + if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2); + let tagObj = findTagObject(value, tagName, schema.tags); + + if (!tagObj) { + if (typeof value.toJSON === 'function') value = value.toJSON(); + if (!value || typeof value !== 'object') return wrapScalars ? new resolveSeq.Scalar(value) : value; + tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map; + } + + if (onTagObj) { + onTagObj(tagObj); + delete ctx.onTagObj; + } // Detect duplicate references to the same object & use Alias nodes for all + // after first. The `obj` wrapper allows for circular references to resolve. + + + const obj = { + value: undefined, + node: undefined + }; + + if (value && typeof value === 'object' && prevObjects) { + const prev = prevObjects.get(value); + + if (prev) { + const alias = new resolveSeq.Alias(prev); // leaves source dirty; must be cleaned by caller + + ctx.aliasNodes.push(alias); // defined along with prevObjects + + return alias; + } + + obj.value = value; + prevObjects.set(value, obj); + } + + obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? 
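The prevObjects bookkeeping in createNode() above is what turns repeated references to the same object into an anchor plus aliases instead of duplicated output. A sketch of the resulting stringification (the exact anchor name is an implementation detail):

const YAML = require('yaml');
const shared = { x: 1 };
console.log(YAML.stringify({ a: shared, b: shared }));
// Roughly:
// a: &a1
//   x: 1
// b: *a1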
new resolveSeq.Scalar(value) : value; + if (tagName && obj.node instanceof resolveSeq.Node) obj.node.tag = tagName; + return obj.node; +} + +function getSchemaTags(schemas, knownTags, customTags, schemaId) { + let tags = schemas[schemaId.replace(/\W/g, '')]; // 'yaml-1.1' -> 'yaml11' + + if (!tags) { + const keys = Object.keys(schemas).map(key => JSON.stringify(key)).join(', '); + throw new Error(`Unknown schema "${schemaId}"; use one of ${keys}`); + } + + if (Array.isArray(customTags)) { + for (const tag of customTags) tags = tags.concat(tag); + } else if (typeof customTags === 'function') { + tags = customTags(tags.slice()); + } + + for (let i = 0; i < tags.length; ++i) { + const tag = tags[i]; + + if (typeof tag === 'string') { + const tagObj = knownTags[tag]; + + if (!tagObj) { + const keys = Object.keys(knownTags).map(key => JSON.stringify(key)).join(', '); + throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); + } + + tags[i] = tagObj; + } + } + + return tags; +} + +const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; + +class Schema { + // TODO: remove in v2 + // TODO: remove in v2 + constructor({ + customTags, + merge, + schema, + sortMapEntries, + tags: deprecatedCustomTags + }) { + this.merge = !!merge; + this.name = schema; + this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null; + if (!customTags && deprecatedCustomTags) warnings.warnOptionDeprecation('tags', 'customTags'); + this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema); + } + + createNode(value, wrapScalars, tagName, ctx) { + const baseCtx = { + defaultPrefix: Schema.defaultPrefix, + schema: this, + wrapScalars + }; + const createCtx = ctx ? Object.assign(ctx, baseCtx) : baseCtx; + return createNode(value, tagName, createCtx); + } + + createPair(key, value, ctx) { + if (!ctx) ctx = { + wrapScalars: true + }; + const k = this.createNode(key, ctx.wrapScalars, null, ctx); + const v = this.createNode(value, ctx.wrapScalars, null, ctx); + return new resolveSeq.Pair(k, v); + } + +} + +PlainValue._defineProperty(Schema, "defaultPrefix", PlainValue.defaultTagPrefix); + +PlainValue._defineProperty(Schema, "defaultTags", PlainValue.defaultTags); + +exports.Schema = Schema; diff --git a/node_modules/yaml/dist/index.js b/node_modules/yaml/dist/index.js new file mode 100644 index 0000000..6df1c77 --- /dev/null +++ b/node_modules/yaml/dist/index.js @@ -0,0 +1,79 @@ +'use strict'; + +var parseCst = require('./parse-cst.js'); +var Document$1 = require('./Document-9b4560a1.js'); +var Schema = require('./Schema-88e323a7.js'); +var PlainValue = require('./PlainValue-ec8e588e.js'); +var warnings = require('./warnings-1000a372.js'); +require('./resolveSeq-d03cb037.js'); + +function createNode(value, wrapScalars = true, tag) { + if (tag === undefined && typeof wrapScalars === 'string') { + tag = wrapScalars; + wrapScalars = true; + } + + const options = Object.assign({}, Document$1.Document.defaults[Document$1.defaultOptions.version], Document$1.defaultOptions); + const schema = new Schema.Schema(options); + return schema.createNode(value, wrapScalars, tag); +} + +class Document extends Document$1.Document { + constructor(options) { + super(Object.assign({}, Document$1.defaultOptions, options)); + } + +} + +function parseAllDocuments(src, options) { + const stream = []; + let prev; + + for (const cstDoc of parseCst.parse(src)) { + const doc = new Document(options); + doc.parse(cstDoc, prev); + stream.push(doc); + prev = doc; + 
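getSchemaTags() above is what the customTags option feeds into: string entries are looked up in the known-tags table, so optional tags such as 'timestamp' can be added to the default schema. A sketch:

const YAML = require('yaml');
YAML.parse('2001-12-14');                                                 // → '2001-12-14' (plain string in core)
YAML.parse('2001-12-14', { customTags: ['timestamp'] }) instanceof Date;  // → true, via the timestamp tag above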
} + + return stream; +} + +function parseDocument(src, options) { + const cst = parseCst.parse(src); + const doc = new Document(options).parse(cst[0]); + + if (cst.length > 1) { + const errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()'; + doc.errors.unshift(new PlainValue.YAMLSemanticError(cst[1], errMsg)); + } + + return doc; +} + +function parse(src, options) { + const doc = parseDocument(src, options); + doc.warnings.forEach(warning => warnings.warn(warning)); + if (doc.errors.length > 0) throw doc.errors[0]; + return doc.toJSON(); +} + +function stringify(value, options) { + const doc = new Document(options); + doc.contents = value; + return String(doc); +} + +const YAML = { + createNode, + defaultOptions: Document$1.defaultOptions, + Document, + parse, + parseAllDocuments, + parseCST: parseCst.parse, + parseDocument, + scalarOptions: Document$1.scalarOptions, + stringify +}; + +exports.YAML = YAML; diff --git a/node_modules/yaml/dist/legacy-exports.js b/node_modules/yaml/dist/legacy-exports.js new file mode 100644 index 0000000..9a5d037 --- /dev/null +++ b/node_modules/yaml/dist/legacy-exports.js @@ -0,0 +1,16 @@ +'use strict'; + +var warnings = require('./warnings-1000a372.js'); +require('./PlainValue-ec8e588e.js'); +require('./resolveSeq-d03cb037.js'); + + + +exports.binary = warnings.binary; +exports.floatTime = warnings.floatTime; +exports.intTime = warnings.intTime; +exports.omap = warnings.omap; +exports.pairs = warnings.pairs; +exports.set = warnings.set; +exports.timestamp = warnings.timestamp; +exports.warnFileDeprecation = warnings.warnFileDeprecation; diff --git a/node_modules/yaml/dist/parse-cst.js b/node_modules/yaml/dist/parse-cst.js new file mode 100644 index 0000000..d69bedc --- /dev/null +++ b/node_modules/yaml/dist/parse-cst.js @@ -0,0 +1,1753 @@ +'use strict'; + +var PlainValue = require('./PlainValue-ec8e588e.js'); + +class BlankLine extends PlainValue.Node { + constructor() { + super(PlainValue.Type.BLANK_LINE); + } + /* istanbul ignore next */ + + + get includesTrailingLines() { + // This is never called from anywhere, but if it were, + // this is the value it should return. + return true; + } + /** + * Parses a blank line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first \n character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + this.range = new PlainValue.Range(start, start + 1); + return start + 1; + } + +} + +class CollectionItem extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.node = null; + } + + get includesTrailingLines() { + return !!this.node && this.node.includesTrailingLines; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; + let { + atLineStart, + lineStart + } = context; + if (!atLineStart && this.type === PlainValue.Type.SEQ_ITEM) this.error = new PlainValue.YAMLSemanticError(this, 'Sequence items must not have preceding content on the same line'); + const indent = atLineStart ? 
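The index.js above is the package's public surface. A short usage sketch, assuming this dist build is what require('yaml') resolves to for Cobalt:

const YAML = require('yaml');

YAML.parse('foo: bar\nnum: 3\n');        // → { foo: 'bar', num: 3 }
YAML.stringify({ foo: 'bar', num: 3 });  // → 'foo: bar\nnum: 3\n'

// parse() accepts a single document only; multi-document streams go through
// parseAllDocuments(), as the error pushed by parseDocument() above suggests.
const docs = YAML.parseAllDocuments('a: 1\n---\nb: 2\n');
docs.map(d => d.toJSON());               // → [ { a: 1 }, { b: 2 } ]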
start - lineStart : context.indent; + let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1); + let ch = src[offset]; + const inlineComment = ch === '#'; + const comments = []; + let blankLine = null; + + while (ch === '\n' || ch === '#') { + if (ch === '#') { + const end = PlainValue.Node.endOfLine(src, offset + 1); + comments.push(new PlainValue.Range(offset, end)); + offset = end; + } else { + atLineStart = true; + lineStart = offset + 1; + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n' && comments.length === 0) { + blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src + }, lineStart); + } + + offset = PlainValue.Node.endOfIndent(src, lineStart); + } + + ch = src[offset]; + } + + if (PlainValue.Node.nextNodeIsIndented(ch, offset - (lineStart + indent), this.type !== PlainValue.Type.SEQ_ITEM)) { + this.node = parseNode({ + atLineStart, + inCollection: false, + indent, + lineStart, + parent: this + }, offset); + } else if (ch && lineStart > start + 1) { + offset = lineStart - 1; + } + + if (this.node) { + if (blankLine) { + // Only blank lines preceding non-empty nodes are captured. Note that + // this means that collection item range start indices do not always + // increase monotonically. -- eemeli/yaml#126 + const items = context.parent.items || context.parent.contents; + if (items) items.push(blankLine); + } + + if (comments.length) Array.prototype.push.apply(this.props, comments); + offset = this.node.range.end; + } else { + if (inlineComment) { + const c = comments[0]; + this.props.push(c); + offset = c.end; + } else { + offset = PlainValue.Node.endOfLine(src, start + 1); + } + } + + const end = this.node ? this.node.valueRange.end : offset; + this.valueRange = new PlainValue.Range(start, end); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + return this.node ? this.node.setOrigRanges(cr, offset) : offset; + } + + toString() { + const { + context: { + src + }, + node, + range, + value + } = this; + if (value != null) return value; + const str = node ? 
src.slice(range.start, node.range.start) + String(node) : src.slice(range.start, range.end); + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class Comment extends PlainValue.Node { + constructor() { + super(PlainValue.Type.COMMENT); + } + /** + * Parses a comment line from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const offset = this.parseComment(start); + this.range = new PlainValue.Range(start, offset); + return offset; + } + +} + +function grabCollectionEndComments(node) { + let cnode = node; + + while (cnode instanceof CollectionItem) cnode = cnode.node; + + if (!(cnode instanceof Collection)) return null; + const len = cnode.items.length; + let ci = -1; + + for (let i = len - 1; i >= 0; --i) { + const n = cnode.items[i]; + + if (n.type === PlainValue.Type.COMMENT) { + // Keep sufficiently indented comments with preceding node + const { + indent, + lineStart + } = n.context; + if (indent > 0 && n.range.start >= lineStart + indent) break; + ci = i; + } else if (n.type === PlainValue.Type.BLANK_LINE) ci = i;else break; + } + + if (ci === -1) return null; + const ca = cnode.items.splice(ci, len - ci); + const prevEnd = ca[0].range.start; + + while (true) { + cnode.range.end = prevEnd; + if (cnode.valueRange && cnode.valueRange.end > prevEnd) cnode.valueRange.end = prevEnd; + if (cnode === node) break; + cnode = cnode.context.parent; + } + + return ca; +} +class Collection extends PlainValue.Node { + static nextContentHasIndent(src, offset, indent) { + const lineStart = PlainValue.Node.endOfLine(src, offset) + 1; + offset = PlainValue.Node.endOfWhiteSpace(src, lineStart); + const ch = src[offset]; + if (!ch) return false; + if (offset >= lineStart + indent) return true; + if (ch !== '#' && ch !== '\n') return false; + return Collection.nextContentHasIndent(src, offset, indent); + } + + constructor(firstItem) { + super(firstItem.type === PlainValue.Type.SEQ_ITEM ? 
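The CollectionItem/Collection machinery above builds the lossless CST that YAML.parseCST exposes. A sketch of what that looks like for a two-item block sequence (property names as shown in this file; 'SEQ' assumes the Type constant values defined earlier in the PlainValue chunk):

const YAML = require('yaml');
const cst = YAML.parseCST('- a\n- b\n');
cst.length;                       // 1 document in the stream
cst[0].contents[0].type;          // 'SEQ' — the Collection built above from two SEQ_ITEMs
cst[0].contents[0].items.length;  // 2
String(cst[0]);                   // '- a\n- b\n' — CST nodes stringify back to their source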
PlainValue.Type.SEQ : PlainValue.Type.MAP); + + for (let i = firstItem.props.length - 1; i >= 0; --i) { + if (firstItem.props[i].start < firstItem.context.lineStart) { + // props on previous line are assumed by the collection + this.props = firstItem.props.slice(0, i + 1); + firstItem.props = firstItem.props.slice(i + 1); + const itemRange = firstItem.props[0] || firstItem.valueRange; + firstItem.range.start = itemRange.start; + break; + } + } + + this.items = [firstItem]; + const ec = grabCollectionEndComments(firstItem); + if (ec) Array.prototype.push.apply(this.items, ec); + } + + get includesTrailingLines() { + return this.items.length > 0; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; // It's easier to recalculate lineStart here rather than tracking down the + // last context from which to read it -- eemeli/yaml#2 + + let lineStart = PlainValue.Node.startOfLine(src, start); + const firstItem = this.items[0]; // First-item context needs to be correct for later comment handling + // -- eemeli/yaml#17 + + firstItem.context.parent = this; + this.valueRange = PlainValue.Range.copy(firstItem.valueRange); + const indent = firstItem.range.start - firstItem.context.lineStart; + let offset = start; + offset = PlainValue.Node.normalizeOffset(src, offset); + let ch = src[offset]; + let atLineStart = PlainValue.Node.endOfWhiteSpace(src, lineStart) === offset; + let prevIncludesTrailingLines = false; + + while (ch) { + while (ch === '\n' || ch === '#') { + if (atLineStart && ch === '\n' && !prevIncludesTrailingLines) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + + this.items.push(blankLine); + offset -= 1; // blankLine.parse() consumes terminal newline + } else if (ch === '#') { + if (offset < lineStart + indent && !Collection.nextContentHasIndent(src, offset, indent)) { + return offset; + } + + const comment = new Comment(); + offset = comment.parse({ + indent, + lineStart, + src + }, offset); + this.items.push(comment); + this.valueRange.end = offset; + + if (offset >= src.length) { + ch = null; + break; + } + } + + lineStart = offset + 1; + offset = PlainValue.Node.endOfIndent(src, lineStart); + + if (PlainValue.Node.atBlank(src, offset)) { + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, offset); + const next = src[wsEnd]; + + if (!next || next === '\n' || next === '#') { + offset = wsEnd; + } + } + + ch = src[offset]; + atLineStart = true; + } + + if (!ch) { + break; + } + + if (offset !== lineStart + indent && (atLineStart || ch !== ':')) { + if (offset < lineStart + indent) { + if (lineStart > start) offset = lineStart; + break; + } else if (!this.error) { + const msg = 'All collection items must start at the same column'; + this.error = new PlainValue.YAMLSyntaxError(this, msg); + } + } + + if (firstItem.type === PlainValue.Type.SEQ_ITEM) { + if (ch !== '-') { + if (lineStart > start) offset = lineStart; + break; + } + } else if (ch === '-' && !this.error) { + // map key may start with -, as long as it's followed by a non-whitespace char + const next = src[offset + 1]; + + if (!next || next === '\n' || next === '\t' || next === ' ') { + const msg = 'A collection cannot be both a mapping and a sequence'; + this.error = new 
PlainValue.YAMLSyntaxError(this, msg); + } + } + + const node = parseNode({ + atLineStart, + inCollection: true, + indent, + lineStart, + parent: this + }, offset); + if (!node) return offset; // at next document start + + this.items.push(node); + this.valueRange.end = node.valueRange.end; + offset = PlainValue.Node.normalizeOffset(src, node.range.end); + ch = src[offset]; + atLineStart = false; + prevIncludesTrailingLines = node.includesTrailingLines; // Need to reset lineStart and atLineStart here if preceding node's range + // has advanced to check the current line's indentation level + // -- eemeli/yaml#10 & eemeli/yaml#38 + + if (ch) { + let ls = offset - 1; + let prev = src[ls]; + + while (prev === ' ' || prev === '\t') prev = src[--ls]; + + if (prev === '\n') { + lineStart = ls + 1; + atLineStart = true; + } + } + + const ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.items, ec); + } + + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.items.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + return offset; + } + + toString() { + const { + context: { + src + }, + items, + range, + value + } = this; + if (value != null) return value; + let str = src.slice(range.start, items[0].range.start) + String(items[0]); + + for (let i = 1; i < items.length; ++i) { + const item = items[i]; + const { + atLineStart, + indent + } = item.context; + if (atLineStart) for (let i = 0; i < indent; ++i) str += ' '; + str += String(item); + } + + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class Directive extends PlainValue.Node { + constructor() { + super(PlainValue.Type.DIRECTIVE); + this.name = null; + } + + get parameters() { + const raw = this.rawValue; + return raw ? raw.trim().split(/[ \t]+/) : []; + } + + parseName(start) { + const { + src + } = this.context; + let offset = start; + let ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '\t' && ch !== ' ') ch = src[offset += 1]; + + this.name = src.slice(start, offset); + return offset; + } + + parseParameters(start) { + const { + src + } = this.context; + let offset = start; + let ch = src[offset]; + + while (ch && ch !== '\n' && ch !== '#') ch = src[offset += 1]; + + this.valueRange = new PlainValue.Range(start, offset); + return offset; + } + + parse(context, start) { + this.context = context; + let offset = this.parseName(start + 1); + offset = this.parseParameters(offset); + offset = this.parseComment(offset); + this.range = new PlainValue.Range(start, offset); + return offset; + } + +} + +class Document extends PlainValue.Node { + static startCommentOrEndBlankLine(src, start) { + const offset = PlainValue.Node.endOfWhiteSpace(src, start); + const ch = src[offset]; + return ch === '#' || ch === '\n' ? 
offset : start; + } + + constructor() { + super(PlainValue.Type.DOCUMENT); + this.directives = null; + this.contents = null; + this.directivesEndMarker = null; + this.documentEndMarker = null; + } + + parseDirectives(start) { + const { + src + } = this.context; + this.directives = []; + let atLineStart = true; + let hasDirectives = false; + let offset = start; + + while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DIRECTIVES_END)) { + offset = Document.startCommentOrEndBlankLine(src, offset); + + switch (src[offset]) { + case '\n': + if (atLineStart) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + + if (offset < src.length) { + this.directives.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.directives.push(comment); + atLineStart = false; + } + break; + + case '%': + { + const directive = new Directive(); + offset = directive.parse({ + parent: this, + src + }, offset); + this.directives.push(directive); + hasDirectives = true; + atLineStart = false; + } + break; + + default: + if (hasDirectives) { + this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + } + + if (src[offset]) { + this.directivesEndMarker = new PlainValue.Range(offset, offset + 3); + return offset + 3; + } + + if (hasDirectives) { + this.error = new PlainValue.YAMLSemanticError(this, 'Missing directives-end indicator line'); + } else if (this.directives.length > 0) { + this.contents = this.directives; + this.directives = []; + } + + return offset; + } + + parseContents(start) { + const { + parseNode, + src + } = this.context; + if (!this.contents) this.contents = []; + let lineStart = start; + + while (src[lineStart - 1] === '-') lineStart -= 1; + + let offset = PlainValue.Node.endOfWhiteSpace(src, start); + let atLineStart = lineStart === start; + this.valueRange = new PlainValue.Range(offset); + + while (!PlainValue.Node.atDocumentBoundary(src, offset, PlainValue.Char.DOCUMENT_END)) { + switch (src[offset]) { + case '\n': + if (atLineStart) { + const blankLine = new BlankLine(); + offset = blankLine.parse({ + src + }, offset); + + if (offset < src.length) { + this.contents.push(blankLine); + } + } else { + offset += 1; + atLineStart = true; + } + + lineStart = offset; + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.contents.push(comment); + atLineStart = false; + } + break; + + default: + { + const iEnd = PlainValue.Node.endOfIndent(src, offset); + const context = { + atLineStart, + indent: -1, + inFlow: false, + inCollection: false, + lineStart, + parent: this + }; + const node = parseNode(context, iEnd); + if (!node) return this.valueRange.end = iEnd; // at next document start + + this.contents.push(node); + offset = node.range.end; + atLineStart = false; + const ec = grabCollectionEndComments(node); + if (ec) Array.prototype.push.apply(this.contents, ec); + } + } + + offset = Document.startCommentOrEndBlankLine(src, offset); + } + + this.valueRange.end = offset; + + if (src[offset]) { + this.documentEndMarker = new PlainValue.Range(offset, offset + 3); + offset += 3; + + if (src[offset]) { + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + + if (src[offset] === '#') { + const 
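parseDirectives() above is where %YAML and %TAG lines are collected before the '---' marker. A sketch of the expected end-to-end effect (the schema switch itself happens in code outside this chunk, so treat the result as an assumption):

const YAML = require('yaml');
const [doc] = YAML.parseAllDocuments('%YAML 1.1\n---\nswitch: on\n');
doc.toJSON();
// → { switch: true } — the %YAML directive should put this document on the 1.1
//   schema, where 'on' is a boolean (under the default 1.2 core schema it would
//   remain the string 'on').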
comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.contents.push(comment); + } + + switch (src[offset]) { + case '\n': + offset += 1; + break; + + case undefined: + break; + + default: + this.error = new PlainValue.YAMLSyntaxError(this, 'Document end marker line cannot have a non-comment suffix'); + } + } + } + + return offset; + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + context.root = this; + this.context = context; + const { + src + } = context; + let offset = src.charCodeAt(start) === 0xfeff ? start + 1 : start; // skip BOM + + offset = this.parseDirectives(offset); + offset = this.parseContents(offset); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.directives.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + if (this.directivesEndMarker) offset = this.directivesEndMarker.setOrigRange(cr, offset); + this.contents.forEach(node => { + offset = node.setOrigRanges(cr, offset); + }); + if (this.documentEndMarker) offset = this.documentEndMarker.setOrigRange(cr, offset); + return offset; + } + + toString() { + const { + contents, + directives, + value + } = this; + if (value != null) return value; + let str = directives.join(''); + + if (contents.length > 0) { + if (directives.length > 0 || contents[0].type === PlainValue.Type.COMMENT) str += '---\n'; + str += contents.join(''); + } + + if (str[str.length - 1] !== '\n') str += '\n'; + return str; + } + +} + +class Alias extends PlainValue.Node { + /** + * Parses an *alias from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = PlainValue.Node.endOfIdentifier(src, start + 1); + this.valueRange = new PlainValue.Range(start + 1, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +const Chomp = { + CLIP: 'CLIP', + KEEP: 'KEEP', + STRIP: 'STRIP' +}; +class BlockValue extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.blockIndent = null; + this.chomping = Chomp.CLIP; + this.header = null; + } + + get includesTrailingLines() { + return this.chomping === Chomp.KEEP; + } + + get strValue() { + if (!this.valueRange || !this.context) return null; + let { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (this.valueRange.isEmpty()) return ''; + let lastNewLine = null; + let ch = src[end - 1]; + + while (ch === '\n' || ch === '\t' || ch === ' ') { + end -= 1; + + if (end <= start) { + if (this.chomping === Chomp.KEEP) break;else return ''; // probably never happens + } + + if (ch === '\n') lastNewLine = end; + ch = src[end - 1]; + } + + let keepStart = end + 1; + + if (lastNewLine) { + if (this.chomping === Chomp.KEEP) { + keepStart = lastNewLine; + end = this.valueRange.end; + } else { + end = lastNewLine; + } + } + + const bi = indent + this.blockIndent; + const folded = this.type === PlainValue.Type.BLOCK_FOLDED; + let atStart = true; + let str = ''; + let sep = ''; + let prevMoreIndented = false; + + for (let i = start; i < end; ++i) { + for (let j = 0; j < bi; ++j) { + if (src[i] !== ' ') break; + i += 1; + } + + const ch = src[i]; + + 
if (ch === '\n') { + if (sep === '\n') str += '\n';else sep = '\n'; + } else { + const lineEnd = PlainValue.Node.endOfLine(src, i); + const line = src.slice(i, lineEnd); + i = lineEnd; + + if (folded && (ch === ' ' || ch === '\t') && i < keepStart) { + if (sep === ' ') sep = '\n';else if (!prevMoreIndented && !atStart && sep === '\n') sep = '\n\n'; + str += sep + line; //+ ((lineEnd < end && src[lineEnd]) || '') + + sep = lineEnd < end && src[lineEnd] || ''; + prevMoreIndented = true; + } else { + str += sep + line; + sep = folded && i < keepStart ? ' ' : '\n'; + prevMoreIndented = false; + } + + if (atStart && line !== '') atStart = false; + } + } + + return this.chomping === Chomp.STRIP ? str : str + '\n'; + } + + parseBlockHeader(start) { + const { + src + } = this.context; + let offset = start + 1; + let bi = ''; + + while (true) { + const ch = src[offset]; + + switch (ch) { + case '-': + this.chomping = Chomp.STRIP; + break; + + case '+': + this.chomping = Chomp.KEEP; + break; + + case '0': + case '1': + case '2': + case '3': + case '4': + case '5': + case '6': + case '7': + case '8': + case '9': + bi += ch; + break; + + default: + this.blockIndent = Number(bi) || null; + this.header = new PlainValue.Range(start, offset); + return offset; + } + + offset += 1; + } + } + + parseBlockValue(start) { + const { + indent, + src + } = this.context; + const explicit = !!this.blockIndent; + let offset = start; + let valueEnd = start; + let minBlockIndent = 1; + + for (let ch = src[offset]; ch === '\n'; ch = src[offset]) { + offset += 1; + if (PlainValue.Node.atDocumentBoundary(src, offset)) break; + const end = PlainValue.Node.endOfBlockIndent(src, indent, offset); // should not include tab? + + if (end === null) break; + const ch = src[end]; + const lineIndent = end - (offset + indent); + + if (!this.blockIndent) { + // no explicit block indent, none yet detected + if (src[end] !== '\n') { + // first line with non-whitespace content + if (lineIndent < minBlockIndent) { + const msg = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + + this.blockIndent = lineIndent; + } else if (lineIndent > minBlockIndent) { + // empty line with more whitespace + minBlockIndent = lineIndent; + } + } else if (ch && ch !== '\n' && lineIndent < this.blockIndent) { + if (src[end] === '#') break; + + if (!this.error) { + const src = explicit ? 'explicit indentation indicator' : 'first line'; + const msg = `Block scalars must not be less indented than their ${src}`; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + } + + if (src[end] === '\n') { + offset = end; + } else { + offset = valueEnd = PlainValue.Node.endOfLine(src, end); + } + } + + if (this.chomping !== Chomp.KEEP) { + offset = src[valueEnd] ? valueEnd + 1 : valueEnd; + } + + this.valueRange = new PlainValue.Range(start + 1, offset); + return offset; + } + /** + * Parses a block value from the source + * + * Accepted forms are: + * ``` + * BS + * block + * lines + * + * BS #comment + * block + * lines + * ``` + * where the block style BS matches the regexp `[|>][-+1-9]*` and block lines + * are empty or have an indent level greater than `indent`. 
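parseBlockHeader() and the Chomp constants above implement the standard block-scalar headers: '|' literal vs '>' folded, with '-' (STRIP) and '+' (KEEP) chomping. Through the public API:

const YAML = require('yaml');
YAML.parse('s: |\n  one\n  two\n');   // → { s: 'one\ntwo\n' }  literal, CLIP chomping
YAML.parse('s: >\n  one\n  two\n');   // → { s: 'one two\n' }   folded
YAML.parse('s: |-\n  one\n  two\n');  // → { s: 'one\ntwo' }    '-' = STRIP, final newline dropped
YAML.parse('s: |+\n  one\n\n');       // → { s: 'one\n\n' }     '+' = KEEP, trailing blank lines survive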
+ * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this block + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = this.parseBlockHeader(start); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + offset = this.parseBlockValue(offset); + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + return this.header ? this.header.setOrigRange(cr, offset) : offset; + } + +} + +class FlowCollection extends PlainValue.Node { + constructor(type, props) { + super(type, props); + this.items = null; + } + + prevNodeIsJsonLike(idx = this.items.length) { + const node = this.items[idx - 1]; + return !!node && (node.jsonLike || node.type === PlainValue.Type.COMMENT && this.prevNodeIsJsonLike(idx - 1)); + } + /** + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this + */ + + + parse(context, start) { + this.context = context; + const { + parseNode, + src + } = context; + let { + indent, + lineStart + } = context; + let char = src[start]; // { or [ + + this.items = [{ + char, + offset: start + }]; + let offset = PlainValue.Node.endOfWhiteSpace(src, start + 1); + char = src[offset]; + + while (char && char !== ']' && char !== '}') { + switch (char) { + case '\n': + { + lineStart = offset + 1; + const wsEnd = PlainValue.Node.endOfWhiteSpace(src, lineStart); + + if (src[wsEnd] === '\n') { + const blankLine = new BlankLine(); + lineStart = blankLine.parse({ + src + }, lineStart); + this.items.push(blankLine); + } + + offset = PlainValue.Node.endOfIndent(src, lineStart); + + if (offset <= lineStart + indent) { + char = src[offset]; + + if (offset < lineStart + indent || char !== ']' && char !== '}') { + const msg = 'Insufficient indentation in flow collection'; + this.error = new PlainValue.YAMLSemanticError(this, msg); + } + } + } + break; + + case ',': + { + this.items.push({ + char, + offset + }); + offset += 1; + } + break; + + case '#': + { + const comment = new Comment(); + offset = comment.parse({ + src + }, offset); + this.items.push(comment); + } + break; + + case '?': + case ':': + { + const next = src[offset + 1]; + + if (next === '\n' || next === '\t' || next === ' ' || next === ',' || // in-flow : after JSON-like key does not need to be followed by whitespace + char === ':' && this.prevNodeIsJsonLike()) { + this.items.push({ + char, + offset + }); + offset += 1; + break; + } + } + // fallthrough + + default: + { + const node = parseNode({ + atLineStart: false, + inCollection: false, + inFlow: true, + indent: -1, + lineStart, + parent: this + }, offset); + + if (!node) { + // at next document start + this.valueRange = new PlainValue.Range(start, offset); + return offset; + } + + this.items.push(node); + offset = PlainValue.Node.normalizeOffset(src, node.range.end); + } + } + + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + char = src[offset]; + } + + this.valueRange = new PlainValue.Range(start, offset + 1); + + if (char) { + this.items.push({ + char, + offset + }); + offset = PlainValue.Node.endOfWhiteSpace(src, offset + 1); + offset = this.parseComment(offset); + } + + return offset; + } + + setOrigRanges(cr, offset) { + offset = super.setOrigRanges(cr, offset); + this.items.forEach(node => { + if (node instanceof PlainValue.Node) { + offset = node.setOrigRanges(cr, 
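FlowCollection above handles the bracketed flow syntax ('{…}' and '[…]'), including nested flow nodes and the indentation check for multi-line flow collections. A short sketch through the public API:

const YAML = require('yaml');
YAML.parse('{ a: 1, b: [2, 3] }');     // → { a: 1, b: [ 2, 3 ] }
YAML.parse('colors: [red, green]\n');  // → { colors: [ 'red', 'green' ] }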
offset); + } else if (cr.length === 0) { + node.origOffset = node.offset; + } else { + let i = offset; + + while (i < cr.length) { + if (cr[i] > node.offset) break;else ++i; + } + + node.origOffset = node.offset + i; + offset = i; + } + }); + return offset; + } + + toString() { + const { + context: { + src + }, + items, + range, + value + } = this; + if (value != null) return value; + const nodes = items.filter(item => item instanceof PlainValue.Node); + let str = ''; + let prevEnd = range.start; + nodes.forEach(node => { + const prefix = src.slice(prevEnd, node.range.start); + prevEnd = node.range.end; + str += prefix + String(node); + + if (str[str.length - 1] === '\n' && src[prevEnd - 1] !== '\n' && src[prevEnd] === '\n') { + // Comment range does not include the terminal newline, but its + // stringified value does. Without this fix, newlines at comment ends + // get duplicated. + prevEnd += 1; + } + }); + str += src.slice(prevEnd, range.end); + return PlainValue.Node.addStringTerminator(src, range.end, str); + } + +} + +class QuoteDouble extends PlainValue.Node { + static endOfQuote(src, offset) { + let ch = src[offset]; + + while (ch && ch !== '"') { + offset += ch === '\\' ? 2 : 1; + ch = src[offset]; + } + + return offset + 1; + } + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + + + get strValue() { + if (!this.valueRange || !this.context) return null; + const errors = []; + const { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (src[end - 1] !== '"') errors.push(new PlainValue.YAMLSyntaxError(this, 'Missing closing "quote')); // Using String#replace is too painful with escaped newlines preceded by + // escaped backslashes; also, this should be faster. + + let str = ''; + + for (let i = start + 1; i < end - 1; ++i) { + const ch = src[i]; + + if (ch === '\n') { + if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + const { + fold, + offset, + error + } = PlainValue.Node.foldNewline(src, i, indent); + str += fold; + i = offset; + if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line double-quoted string needs to be sufficiently indented')); + } else if (ch === '\\') { + i += 1; + + switch (src[i]) { + case '0': + str += '\0'; + break; + // null character + + case 'a': + str += '\x07'; + break; + // bell character + + case 'b': + str += '\b'; + break; + // backspace + + case 'e': + str += '\x1b'; + break; + // escape character + + case 'f': + str += '\f'; + break; + // form feed + + case 'n': + str += '\n'; + break; + // line feed + + case 'r': + str += '\r'; + break; + // carriage return + + case 't': + str += '\t'; + break; + // horizontal tab + + case 'v': + str += '\v'; + break; + // vertical tab + + case 'N': + str += '\u0085'; + break; + // Unicode next line + + case '_': + str += '\u00a0'; + break; + // Unicode non-breaking space + + case 'L': + str += '\u2028'; + break; + // Unicode line separator + + case 'P': + str += '\u2029'; + break; + // Unicode paragraph separator + + case ' ': + str += ' '; + break; + + case '"': + str += '"'; + break; + + case '/': + str += '/'; + break; + + case '\\': + str += '\\'; + break; + + case '\t': + str += '\t'; + break; + + case 'x': + str += this.parseCharCode(i + 1, 2, errors); + i += 2; + break; + + case 'u': + str += this.parseCharCode(i + 1, 4, errors); + i += 4; + break; + + case 'U': + str += this.parseCharCode(i + 1, 8, 
errors); + i += 8; + break; + + case '\n': + // skip escaped newlines, but still trim the following line + while (src[i + 1] === ' ' || src[i + 1] === '\t') i += 1; + + break; + + default: + errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(i - 1, 2)}`)); + str += '\\' + src[i]; + } + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? { + errors, + str + } : str; + } + + parseCharCode(offset, length, errors) { + const { + src + } = this.context; + const cc = src.substr(offset, length); + const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); + const code = ok ? parseInt(cc, 16) : NaN; + + if (isNaN(code)) { + errors.push(new PlainValue.YAMLSyntaxError(this, `Invalid escape sequence ${src.substr(offset - 2, length + 2)}`)); + return src.substr(offset - 2, length + 2); + } + + return String.fromCodePoint(code); + } + /** + * Parses a "double quoted" value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = QuoteDouble.endOfQuote(src, start + 1); + this.valueRange = new PlainValue.Range(start, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +class QuoteSingle extends PlainValue.Node { + static endOfQuote(src, offset) { + let ch = src[offset]; + + while (ch) { + if (ch === "'") { + if (src[offset + 1] !== "'") break; + ch = src[offset += 2]; + } else { + ch = src[offset += 1]; + } + } + + return offset + 1; + } + /** + * @returns {string | { str: string, errors: YAMLSyntaxError[] }} + */ + + + get strValue() { + if (!this.valueRange || !this.context) return null; + const errors = []; + const { + start, + end + } = this.valueRange; + const { + indent, + src + } = this.context; + if (src[end - 1] !== "'") errors.push(new PlainValue.YAMLSyntaxError(this, "Missing closing 'quote")); + let str = ''; + + for (let i = start + 1; i < end - 1; ++i) { + const ch = src[i]; + + if (ch === '\n') { + if (PlainValue.Node.atDocumentBoundary(src, i + 1)) errors.push(new PlainValue.YAMLSemanticError(this, 'Document boundary indicators are not allowed within string values')); + const { + fold, + offset, + error + } = PlainValue.Node.foldNewline(src, i, indent); + str += fold; + i = offset; + if (error) errors.push(new PlainValue.YAMLSemanticError(this, 'Multi-line single-quoted string needs to be sufficiently indented')); + } else if (ch === "'") { + str += ch; + i += 1; + if (src[i] !== "'") errors.push(new PlainValue.YAMLSyntaxError(this, 'Unescaped single quote? This should not happen.')); + } else if (ch === ' ' || ch === '\t') { + // trim trailing whitespace + const wsStart = i; + let next = src[i + 1]; + + while (next === ' ' || next === '\t') { + i += 1; + next = src[i + 1]; + } + + if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch; + } else { + str += ch; + } + } + + return errors.length > 0 ? 
{ + errors, + str + } : str; + } + /** + * Parses a 'single quoted' value from the source + * + * @param {ParseContext} context + * @param {number} start - Index of first character + * @returns {number} - Index of the character after this scalar + */ + + + parse(context, start) { + this.context = context; + const { + src + } = context; + let offset = QuoteSingle.endOfQuote(src, start + 1); + this.valueRange = new PlainValue.Range(start, offset); + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + offset = this.parseComment(offset); + return offset; + } + +} + +function createNewNode(type, props) { + switch (type) { + case PlainValue.Type.ALIAS: + return new Alias(type, props); + + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + return new BlockValue(type, props); + + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.FLOW_SEQ: + return new FlowCollection(type, props); + + case PlainValue.Type.MAP_KEY: + case PlainValue.Type.MAP_VALUE: + case PlainValue.Type.SEQ_ITEM: + return new CollectionItem(type, props); + + case PlainValue.Type.COMMENT: + case PlainValue.Type.PLAIN: + return new PlainValue.PlainValue(type, props); + + case PlainValue.Type.QUOTE_DOUBLE: + return new QuoteDouble(type, props); + + case PlainValue.Type.QUOTE_SINGLE: + return new QuoteSingle(type, props); + + /* istanbul ignore next */ + + default: + return null; + // should never happen + } +} +/** + * @param {boolean} atLineStart - Node starts at beginning of line + * @param {boolean} inFlow - true if currently in a flow context + * @param {boolean} inCollection - true if currently in a collection context + * @param {number} indent - Current level of indentation + * @param {number} lineStart - Start of the current line + * @param {Node} parent - The parent of the node + * @param {string} src - Source of the YAML document + */ + + +class ParseContext { + static parseType(src, offset, inFlow) { + switch (src[offset]) { + case '*': + return PlainValue.Type.ALIAS; + + case '>': + return PlainValue.Type.BLOCK_FOLDED; + + case '|': + return PlainValue.Type.BLOCK_LITERAL; + + case '{': + return PlainValue.Type.FLOW_MAP; + + case '[': + return PlainValue.Type.FLOW_SEQ; + + case '?': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_KEY : PlainValue.Type.PLAIN; + + case ':': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.MAP_VALUE : PlainValue.Type.PLAIN; + + case '-': + return !inFlow && PlainValue.Node.atBlank(src, offset + 1, true) ? PlainValue.Type.SEQ_ITEM : PlainValue.Type.PLAIN; + + case '"': + return PlainValue.Type.QUOTE_DOUBLE; + + case "'": + return PlainValue.Type.QUOTE_SINGLE; + + default: + return PlainValue.Type.PLAIN; + } + } + + constructor(orig = {}, { + atLineStart, + inCollection, + inFlow, + indent, + lineStart, + parent + } = {}) { + PlainValue._defineProperty(this, "parseNode", (overlay, start) => { + if (PlainValue.Node.atDocumentBoundary(this.src, start)) return null; + const context = new ParseContext(this, overlay); + const { + props, + type, + valueStart + } = context.parseProps(start); + const node = createNewNode(type, props); + let offset = node.parse(context, valueStart); + node.range = new PlainValue.Range(start, offset); + /* istanbul ignore if */ + + if (offset <= start) { + // This should never happen, but if it does, let's make sure to at least + // step one character forward to avoid a busy loop. 
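As an illustrative sketch (not part of the committed dist file): the node classes above are what the low-level CST parser hands back, and yaml@1.x documents a parseCST entry point for exactly this file's parse() export. The CST shape assumed below (a MAP collection whose items alternate between key nodes and MAP_VALUE items) is taken from the yaml 1.x docs and exercises the QuoteDouble escape handling shown above.

const YAML = require('yaml');

const cst = YAML.parseCST('greeting: "hello \\u00e9"\n');
const map = cst[0].contents.find(node => node.type === 'MAP');
for (const item of map.items) {
  if (item.node) {
    // MAP_VALUE item wrapping a QuoteDouble; strValue decodes the \u00e9 escape
    console.log(item.node.type, item.node.strValue); // QUOTE_DOUBLE 'hello é'
  } else {
    console.log(item.type, item.strValue);           // PLAIN 'greeting'
  }
}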
+ node.error = new Error(`Node#parse consumed no characters`); + node.error.parseEnd = offset; + node.error.source = node; + node.range.end = start + 1; + } + + if (context.nodeStartsCollection(node)) { + if (!node.error && !context.atLineStart && context.parent.type === PlainValue.Type.DOCUMENT) { + node.error = new PlainValue.YAMLSyntaxError(node, 'Block collection must not have preceding content here (e.g. directives-end indicator)'); + } + + const collection = new Collection(node); + offset = collection.parse(new ParseContext(context), offset); + collection.range = new PlainValue.Range(start, offset); + return collection; + } + + return node; + }); + + this.atLineStart = atLineStart != null ? atLineStart : orig.atLineStart || false; + this.inCollection = inCollection != null ? inCollection : orig.inCollection || false; + this.inFlow = inFlow != null ? inFlow : orig.inFlow || false; + this.indent = indent != null ? indent : orig.indent; + this.lineStart = lineStart != null ? lineStart : orig.lineStart; + this.parent = parent != null ? parent : orig.parent || {}; + this.root = orig.root; + this.src = orig.src; + } + + nodeStartsCollection(node) { + const { + inCollection, + inFlow, + src + } = this; + if (inCollection || inFlow) return false; + if (node instanceof CollectionItem) return true; // check for implicit key + + let offset = node.range.end; + if (src[offset] === '\n' || src[offset - 1] === '\n') return false; + offset = PlainValue.Node.endOfWhiteSpace(src, offset); + return src[offset] === ':'; + } // Anchor and tag are before type, which determines the node implementation + // class; hence this intermediate step. + + + parseProps(offset) { + const { + inFlow, + parent, + src + } = this; + const props = []; + let lineHasProps = false; + offset = this.atLineStart ? PlainValue.Node.endOfIndent(src, offset) : PlainValue.Node.endOfWhiteSpace(src, offset); + let ch = src[offset]; + + while (ch === PlainValue.Char.ANCHOR || ch === PlainValue.Char.COMMENT || ch === PlainValue.Char.TAG || ch === '\n') { + if (ch === '\n') { + let inEnd = offset; + let lineStart; + + do { + lineStart = inEnd + 1; + inEnd = PlainValue.Node.endOfIndent(src, lineStart); + } while (src[inEnd] === '\n'); + + const indentDiff = inEnd - (lineStart + this.indent); + const noIndicatorAsIndent = parent.type === PlainValue.Type.SEQ_ITEM && parent.context.atLineStart; + if (src[inEnd] !== '#' && !PlainValue.Node.nextNodeIsIndented(src[inEnd], indentDiff, !noIndicatorAsIndent)) break; + this.atLineStart = true; + this.lineStart = lineStart; + lineHasProps = false; + offset = inEnd; + } else if (ch === PlainValue.Char.COMMENT) { + const end = PlainValue.Node.endOfLine(src, offset + 1); + props.push(new PlainValue.Range(offset, end)); + offset = end; + } else { + let end = PlainValue.Node.endOfIdentifier(src, offset + 1); + + if (ch === PlainValue.Char.TAG && src[end] === ',' && /^[a-zA-Z0-9-]+\.[a-zA-Z0-9-]+,\d\d\d\d(-\d\d){0,2}\/\S/.test(src.slice(offset + 1, end + 13))) { + // Let's presume we're dealing with a YAML 1.0 domain tag here, rather + // than an empty but 'foo.bar' private-tagged node in a flow collection + // followed without whitespace by a plain string starting with a year + // or date divided by something. 
+ end = PlainValue.Node.endOfIdentifier(src, end + 5); + } + + props.push(new PlainValue.Range(offset, end)); + lineHasProps = true; + offset = PlainValue.Node.endOfWhiteSpace(src, end); + } + + ch = src[offset]; + } // '- &a : b' has an anchor on an empty node + + + if (lineHasProps && ch === ':' && PlainValue.Node.atBlank(src, offset + 1, true)) offset -= 1; + const type = ParseContext.parseType(src, offset, inFlow); + return { + props, + type, + valueStart: offset + }; + } + /** + * Parses a node from the source + * @param {ParseContext} overlay + * @param {number} start - Index of first non-whitespace character for the node + * @returns {?Node} - null if at a document boundary + */ + + +} + +// Published as 'yaml/parse-cst' +function parse(src) { + const cr = []; + + if (src.indexOf('\r') !== -1) { + src = src.replace(/\r\n?/g, (match, offset) => { + if (match.length > 1) cr.push(offset); + return '\n'; + }); + } + + const documents = []; + let offset = 0; + + do { + const doc = new Document(); + const context = new ParseContext({ + src + }); + offset = doc.parse(context, offset); + documents.push(doc); + } while (offset < src.length); + + documents.setOrigRanges = () => { + if (cr.length === 0) return false; + + for (let i = 1; i < cr.length; ++i) cr[i] -= i; + + let crOffset = 0; + + for (let i = 0; i < documents.length; ++i) { + crOffset = documents[i].setOrigRanges(cr, crOffset); + } + + cr.splice(0, cr.length); + return true; + }; + + documents.toString = () => documents.join('...\n'); + + return documents; +} + +exports.parse = parse; diff --git a/node_modules/yaml/dist/resolveSeq-d03cb037.js b/node_modules/yaml/dist/resolveSeq-d03cb037.js new file mode 100644 index 0000000..d7c7c8c --- /dev/null +++ b/node_modules/yaml/dist/resolveSeq-d03cb037.js @@ -0,0 +1,2161 @@ +'use strict'; + +var PlainValue = require('./PlainValue-ec8e588e.js'); + +function addCommentBefore(str, indent, comment) { + if (!comment) return str; + const cc = comment.replace(/[\s\S]^/gm, `$&${indent}#`); + return `#${cc}\n${indent}${str}`; +} +function addComment(str, indent, comment) { + return !comment ? str : comment.indexOf('\n') === -1 ? `${str} #${comment}` : `${str}\n` + comment.replace(/^/gm, `${indent || ''}#`); +} + +class Node {} + +function toJSON(value, arg, ctx) { + if (Array.isArray(value)) return value.map((v, i) => toJSON(v, String(i), ctx)); + + if (value && typeof value.toJSON === 'function') { + const anchor = ctx && ctx.anchors && ctx.anchors.get(value); + if (anchor) ctx.onCreate = res => { + anchor.res = res; + delete ctx.onCreate; + }; + const res = value.toJSON(arg, ctx); + if (anchor && ctx.onCreate) ctx.onCreate(res); + return res; + } + + if ((!ctx || !ctx.keep) && typeof value === 'bigint') return Number(value); + return value; +} + +class Scalar extends Node { + constructor(value) { + super(); + this.value = value; + } + + toJSON(arg, ctx) { + return ctx && ctx.keep ? this.value : toJSON(this.value, arg, ctx); + } + + toString() { + return String(this.value); + } + +} + +function collectionFromPath(schema, path, value) { + let v = value; + + for (let i = path.length - 1; i >= 0; --i) { + const k = path[i]; + + if (Number.isInteger(k) && k >= 0) { + const a = []; + a[k] = v; + v = a; + } else { + const o = {}; + Object.defineProperty(o, k, { + value: v, + writable: true, + enumerable: true, + configurable: true + }); + v = o; + } + } + + return schema.createNode(v, false); +} // null, undefined, or an empty non-string iterable (e.g. 
[]) + + +const isEmptyPath = path => path == null || typeof path === 'object' && path[Symbol.iterator]().next().done; +class Collection extends Node { + constructor(schema) { + super(); + + PlainValue._defineProperty(this, "items", []); + + this.schema = schema; + } + + addIn(path, value) { + if (isEmptyPath(path)) this.add(value);else { + const [key, ...rest] = path; + const node = this.get(key, true); + if (node instanceof Collection) node.addIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + } + + deleteIn([key, ...rest]) { + if (rest.length === 0) return this.delete(key); + const node = this.get(key, true); + if (node instanceof Collection) return node.deleteIn(rest);else throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + + getIn([key, ...rest], keepScalar) { + const node = this.get(key, true); + if (rest.length === 0) return !keepScalar && node instanceof Scalar ? node.value : node;else return node instanceof Collection ? node.getIn(rest, keepScalar) : undefined; + } + + hasAllNullValues() { + return this.items.every(node => { + if (!node || node.type !== 'PAIR') return false; + const n = node.value; + return n == null || n instanceof Scalar && n.value == null && !n.commentBefore && !n.comment && !n.tag; + }); + } + + hasIn([key, ...rest]) { + if (rest.length === 0) return this.has(key); + const node = this.get(key, true); + return node instanceof Collection ? node.hasIn(rest) : false; + } + + setIn([key, ...rest], value) { + if (rest.length === 0) { + this.set(key, value); + } else { + const node = this.get(key, true); + if (node instanceof Collection) node.setIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); + } + } // overridden in implementations + + /* istanbul ignore next */ + + + toJSON() { + return null; + } + + toString(ctx, { + blockItem, + flowChars, + isMap, + itemIndent + }, onComment, onChompKeep) { + const { + indent, + indentStep, + stringify + } = ctx; + const inFlow = this.type === PlainValue.Type.FLOW_MAP || this.type === PlainValue.Type.FLOW_SEQ || ctx.inFlow; + if (inFlow) itemIndent += indentStep; + const allNullValues = isMap && this.hasAllNullValues(); + ctx = Object.assign({}, ctx, { + allNullValues, + indent: itemIndent, + inFlow, + type: null + }); + let chompKeep = false; + let hasItemWithNewLine = false; + const nodes = this.items.reduce((nodes, item, i) => { + let comment; + + if (item) { + if (!chompKeep && item.spaceBefore) nodes.push({ + type: 'comment', + str: '' + }); + if (item.commentBefore) item.commentBefore.match(/^.*$/gm).forEach(line => { + nodes.push({ + type: 'comment', + str: `#${line}` + }); + }); + if (item.comment) comment = item.comment; + if (inFlow && (!chompKeep && item.spaceBefore || item.commentBefore || item.comment || item.key && (item.key.commentBefore || item.key.comment) || item.value && (item.value.commentBefore || item.value.comment))) hasItemWithNewLine = true; + } + + chompKeep = false; + let str = stringify(item, ctx, () => comment = null, () => chompKeep = true); + if (inFlow && !hasItemWithNewLine && str.includes('\n')) hasItemWithNewLine = true; + if (inFlow && i < this.items.length - 1) str += ','; + str = addComment(str, itemIndent, comment); + if (chompKeep && (comment || inFlow)) chompKeep = false; + nodes.push({ + type: 'item', + str + }); + return nodes; + }, []); + let str; + + if (nodes.length === 0) { + str = flowChars.start + flowChars.end; + } else if (inFlow) { + const { + start, + end + } = flowChars; + const strings = nodes.map(n => n.str); + + if (hasItemWithNewLine || strings.reduce((sum, str) => sum + str.length + 2, 2) > Collection.maxFlowStringSingleLineLength) { + str = start; + + for (const s of strings) { + str += s ? `\n${indentStep}${indent}${s}` : '\n'; + } + + str += `\n${indent}${end}`; + } else { + str = `${start} ${strings.join(' ')} ${end}`; + } + } else { + const strings = nodes.map(blockItem); + str = strings.shift(); + + for (const s of strings) str += s ? `\n${indent}${s}` : '\n'; + } + + if (this.comment) { + str += '\n' + this.comment.replace(/^/gm, `${indent}#`); + if (onComment) onComment(); + } else if (chompKeep && onChompKeep) onChompKeep(); + + return str; + } + +} + +PlainValue._defineProperty(Collection, "maxFlowStringSingleLineLength", 60); + +function asItemIndex(key) { + let idx = key instanceof Scalar ? key.value : key; + if (idx && typeof idx === 'string') idx = Number(idx); + return Number.isInteger(idx) && idx >= 0 ? idx : null; +} + +class YAMLSeq extends Collection { + add(value) { + this.items.push(value); + } + + delete(key) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') return false; + const del = this.items.splice(idx, 1); + return del.length > 0; + } + + get(key, keepScalar) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') return undefined; + const it = this.items[idx]; + return !keepScalar && it instanceof Scalar ? 
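As a minimal usage sketch (not part of the committed dist file): the getIn/hasIn/setIn path helpers defined on Collection above are also exposed on parsed documents in yaml@1.x, which is the usual way to read nested values without touching the node classes directly. The sample data here is made up for illustration.

const YAML = require('yaml');

const doc = YAML.parseDocument('site:\n  title: Example\n  tags: [a, b]\n');
console.log(doc.getIn(['site', 'tags', 1])); // 'b'
console.log(doc.hasIn(['site', 'author']));  // false
console.log(doc.get('site').get('title'));   // 'Example' (doc.get returns the YAMLMap node)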
it.value : it; + } + + has(key) { + const idx = asItemIndex(key); + return typeof idx === 'number' && idx < this.items.length; + } + + set(key, value) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') throw new Error(`Expected a valid index, not ${key}.`); + this.items[idx] = value; + } + + toJSON(_, ctx) { + const seq = []; + if (ctx && ctx.onCreate) ctx.onCreate(seq); + let i = 0; + + for (const item of this.items) seq.push(toJSON(item, String(i++), ctx)); + + return seq; + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + return super.toString(ctx, { + blockItem: n => n.type === 'comment' ? n.str : `- ${n.str}`, + flowChars: { + start: '[', + end: ']' + }, + isMap: false, + itemIndent: (ctx.indent || '') + ' ' + }, onComment, onChompKeep); + } + +} + +const stringifyKey = (key, jsKey, ctx) => { + if (jsKey === null) return ''; + if (typeof jsKey !== 'object') return String(jsKey); + if (key instanceof Node && ctx && ctx.doc) return key.toString({ + anchors: Object.create(null), + doc: ctx.doc, + indent: '', + indentStep: ctx.indentStep, + inFlow: true, + inStringifyKey: true, + stringify: ctx.stringify + }); + return JSON.stringify(jsKey); +}; + +class Pair extends Node { + constructor(key, value = null) { + super(); + this.key = key; + this.value = value; + this.type = Pair.Type.PAIR; + } + + get commentBefore() { + return this.key instanceof Node ? this.key.commentBefore : undefined; + } + + set commentBefore(cb) { + if (this.key == null) this.key = new Scalar(null); + if (this.key instanceof Node) this.key.commentBefore = cb;else { + const msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.'; + throw new Error(msg); + } + } + + addToJSMap(ctx, map) { + const key = toJSON(this.key, '', ctx); + + if (map instanceof Map) { + const value = toJSON(this.value, key, ctx); + map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else { + const stringKey = stringifyKey(this.key, key, ctx); + const value = toJSON(this.value, stringKey, ctx); + if (stringKey in map) Object.defineProperty(map, stringKey, { + value, + writable: true, + enumerable: true, + configurable: true + });else map[stringKey] = value; + } + + return map; + } + + toJSON(_, ctx) { + const pair = ctx && ctx.mapAsMap ? new Map() : {}; + return this.addToJSMap(ctx, pair); + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx || !ctx.doc) return JSON.stringify(this); + const { + indent: indentSize, + indentSeq, + simpleKeys + } = ctx.doc.options; + let { + key, + value + } = this; + let keyComment = key instanceof Node && key.comment; + + if (simpleKeys) { + if (keyComment) { + throw new Error('With simple keys, key nodes cannot have comments'); + } + + if (key instanceof Collection) { + const msg = 'With simple keys, collection cannot be used as a key value'; + throw new Error(msg); + } + } + + let explicitKey = !simpleKeys && (!key || keyComment || (key instanceof Node ? 
key instanceof Collection || key.type === PlainValue.Type.BLOCK_FOLDED || key.type === PlainValue.Type.BLOCK_LITERAL : typeof key === 'object')); + const { + doc, + indent, + indentStep, + stringify + } = ctx; + ctx = Object.assign({}, ctx, { + implicitKey: !explicitKey, + indent: indent + indentStep + }); + let chompKeep = false; + let str = stringify(key, ctx, () => keyComment = null, () => chompKeep = true); + str = addComment(str, ctx.indent, keyComment); + + if (!explicitKey && str.length > 1024) { + if (simpleKeys) throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); + explicitKey = true; + } + + if (ctx.allNullValues && !simpleKeys) { + if (this.comment) { + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } else if (chompKeep && !keyComment && onChompKeep) onChompKeep(); + + return ctx.inFlow && !explicitKey ? str : `? ${str}`; + } + + str = explicitKey ? `? ${str}\n${indent}:` : `${str}:`; + + if (this.comment) { + // expected (but not strictly required) to be a single-line comment + str = addComment(str, ctx.indent, this.comment); + if (onComment) onComment(); + } + + let vcb = ''; + let valueComment = null; + + if (value instanceof Node) { + if (value.spaceBefore) vcb = '\n'; + + if (value.commentBefore) { + const cs = value.commentBefore.replace(/^/gm, `${ctx.indent}#`); + vcb += `\n${cs}`; + } + + valueComment = value.comment; + } else if (value && typeof value === 'object') { + value = doc.schema.createNode(value, true); + } + + ctx.implicitKey = false; + if (!explicitKey && !this.comment && value instanceof Scalar) ctx.indentAtStart = str.length + 1; + chompKeep = false; + + if (!indentSeq && indentSize >= 2 && !ctx.inFlow && !explicitKey && value instanceof YAMLSeq && value.type !== PlainValue.Type.FLOW_SEQ && !value.tag && !doc.anchors.getName(value)) { + // If indentSeq === false, consider '- ' as part of indentation where possible + ctx.indent = ctx.indent.substr(2); + } + + const valueStr = stringify(value, ctx, () => valueComment = null, () => chompKeep = true); + let ws = ' '; + + if (vcb || this.comment) { + ws = `${vcb}\n${ctx.indent}`; + } else if (!explicitKey && value instanceof Collection) { + const flow = valueStr[0] === '[' || valueStr[0] === '{'; + if (!flow || valueStr.includes('\n')) ws = `\n${ctx.indent}`; + } else if (valueStr[0] === '\n') ws = ''; + + if (chompKeep && !valueComment && onChompKeep) onChompKeep(); + return addComment(str + ws + valueStr, ctx.indent, valueComment); + } + +} + +PlainValue._defineProperty(Pair, "Type", { + PAIR: 'PAIR', + MERGE_PAIR: 'MERGE_PAIR' +}); + +const getAliasCount = (node, anchors) => { + if (node instanceof Alias) { + const anchor = anchors.get(node.source); + return anchor.count * anchor.aliasCount; + } else if (node instanceof Collection) { + let count = 0; + + for (const item of node.items) { + const c = getAliasCount(item, anchors); + if (c > count) count = c; + } + + return count; + } else if (node instanceof Pair) { + const kc = getAliasCount(node.key, anchors); + const vc = getAliasCount(node.value, anchors); + return Math.max(kc, vc); + } + + return 1; +}; + +class Alias extends Node { + static stringify({ + range, + source + }, { + anchors, + doc, + implicitKey, + inStringifyKey + }) { + let anchor = Object.keys(anchors).find(a => anchors[a] === source); + if (!anchor && inStringifyKey) anchor = doc.anchors.getName(source) || doc.anchors.newName(); + if (anchor) return `*${anchor}${implicitKey ? 
' ' : ''}`; + const msg = doc.anchors.getName(source) ? 'Alias node must be after source node' : 'Source node not found for alias node'; + throw new Error(`${msg} [${range}]`); + } + + constructor(source) { + super(); + this.source = source; + this.type = PlainValue.Type.ALIAS; + } + + set tag(t) { + throw new Error('Alias nodes cannot have tags'); + } + + toJSON(arg, ctx) { + if (!ctx) return toJSON(this.source, arg, ctx); + const { + anchors, + maxAliasCount + } = ctx; + const anchor = anchors.get(this.source); + /* istanbul ignore if */ + + if (!anchor || anchor.res === undefined) { + const msg = 'This should not happen: Alias anchor was not resolved?'; + if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg); + } + + if (maxAliasCount >= 0) { + anchor.count += 1; + if (anchor.aliasCount === 0) anchor.aliasCount = getAliasCount(this.source, anchors); + + if (anchor.count * anchor.aliasCount > maxAliasCount) { + const msg = 'Excessive alias count indicates a resource exhaustion attack'; + if (this.cstNode) throw new PlainValue.YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg); + } + } + + return anchor.res; + } // Only called when stringifying an alias mapping key while constructing + // Object output. + + + toString(ctx) { + return Alias.stringify(this, ctx); + } + +} + +PlainValue._defineProperty(Alias, "default", true); + +function findPair(items, key) { + const k = key instanceof Scalar ? key.value : key; + + for (const it of items) { + if (it instanceof Pair) { + if (it.key === key || it.key === k) return it; + if (it.key && it.key.value === k) return it; + } + } + + return undefined; +} +class YAMLMap extends Collection { + add(pair, overwrite) { + if (!pair) pair = new Pair(pair);else if (!(pair instanceof Pair)) pair = new Pair(pair.key || pair, pair.value); + const prev = findPair(this.items, pair.key); + const sortEntries = this.schema && this.schema.sortMapEntries; + + if (prev) { + if (overwrite) prev.value = pair.value;else throw new Error(`Key ${pair.key} already set`); + } else if (sortEntries) { + const i = this.items.findIndex(item => sortEntries(pair, item) < 0); + if (i === -1) this.items.push(pair);else this.items.splice(i, 0, pair); + } else { + this.items.push(pair); + } + } + + delete(key) { + const it = findPair(this.items, key); + if (!it) return false; + const del = this.items.splice(this.items.indexOf(it), 1); + return del.length > 0; + } + + get(key, keepScalar) { + const it = findPair(this.items, key); + const node = it && it.value; + return !keepScalar && node instanceof Scalar ? node.value : node; + } + + has(key) { + return !!findPair(this.items, key); + } + + set(key, value) { + this.add(new Pair(key, value), true); + } + /** + * @param {*} arg ignored + * @param {*} ctx Conversion context, originally set in Document#toJSON() + * @param {Class} Type If set, forces the returned collection type + * @returns {*} Instance of Type, Map, or Object + */ + + + toJSON(_, ctx, Type) { + const map = Type ? new Type() : ctx && ctx.mapAsMap ? 
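As an illustrative sketch (not part of the committed dist file): anchors resolve through the Alias class above, so aliased nodes share a single resolved value, and the count check guards against alias-expansion attacks; per the yaml 1.x docs that limit is the maxAliasCount option (default 100, -1 to disable).

const YAML = require('yaml');

const cfg = YAML.parse('base: &b { x: 1 }\ncopy: *b\n');
console.log(cfg.copy.x);            // 1
console.log(cfg.copy === cfg.base); // true — the alias reuses the anchored value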
new Map() : {}; + if (ctx && ctx.onCreate) ctx.onCreate(map); + + for (const item of this.items) item.addToJSMap(ctx, map); + + return map; + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + + for (const item of this.items) { + if (!(item instanceof Pair)) throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); + } + + return super.toString(ctx, { + blockItem: n => n.str, + flowChars: { + start: '{', + end: '}' + }, + isMap: true, + itemIndent: ctx.indent || '' + }, onComment, onChompKeep); + } + +} + +const MERGE_KEY = '<<'; +class Merge extends Pair { + constructor(pair) { + if (pair instanceof Pair) { + let seq = pair.value; + + if (!(seq instanceof YAMLSeq)) { + seq = new YAMLSeq(); + seq.items.push(pair.value); + seq.range = pair.value.range; + } + + super(pair.key, seq); + this.range = pair.range; + } else { + super(new Scalar(MERGE_KEY), new YAMLSeq()); + } + + this.type = Pair.Type.MERGE_PAIR; + } // If the value associated with a merge key is a single mapping node, each of + // its key/value pairs is inserted into the current mapping, unless the key + // already exists in it. If the value associated with the merge key is a + // sequence, then this sequence is expected to contain mapping nodes and each + // of these nodes is merged in turn according to its order in the sequence. + // Keys in mapping nodes earlier in the sequence override keys specified in + // later mapping nodes. -- http://yaml.org/type/merge.html + + + addToJSMap(ctx, map) { + for (const { + source + } of this.value.items) { + if (!(source instanceof YAMLMap)) throw new Error('Merge sources must be maps'); + const srcMap = source.toJSON(null, ctx, Map); + + for (const [key, value] of srcMap) { + if (map instanceof Map) { + if (!map.has(key)) map.set(key, value); + } else if (map instanceof Set) { + map.add(key); + } else if (!Object.prototype.hasOwnProperty.call(map, key)) { + Object.defineProperty(map, key, { + value, + writable: true, + enumerable: true, + configurable: true + }); + } + } + } + + return map; + } + + toString(ctx, onComment) { + const seq = this.value; + if (seq.items.length > 1) return super.toString(ctx, onComment); + this.value = seq.items[0]; + const str = super.toString(ctx, onComment); + this.value = seq; + return str; + } + +} + +const binaryOptions = { + defaultType: PlainValue.Type.BLOCK_LITERAL, + lineWidth: 76 +}; +const boolOptions = { + trueStr: 'true', + falseStr: 'false' +}; +const intOptions = { + asBigInt: false +}; +const nullOptions = { + nullStr: 'null' +}; +const strOptions = { + defaultType: PlainValue.Type.PLAIN, + doubleQuoted: { + jsonEncoding: false, + minMultiLineLength: 40 + }, + fold: { + lineWidth: 80, + minContentWidth: 20 + } +}; + +function resolveScalar(str, tags, scalarFallback) { + for (const { + format, + test, + resolve + } of tags) { + if (test) { + const match = str.match(test); + + if (match) { + let res = resolve.apply(null, match); + if (!(res instanceof Scalar)) res = new Scalar(res); + if (format) res.format = format; + return res; + } + } + } + + if (scalarFallback) str = scalarFallback(str); + return new Scalar(str); +} + +const FOLD_FLOW = 'flow'; +const FOLD_BLOCK = 'block'; +const FOLD_QUOTED = 'quoted'; // presumes i+1 is at the start of a line +// returns index of last newline in more-indented block + +const consumeMoreIndentedLines = (text, i) => { + let ch = text[i + 1]; + + while (ch === ' ' || ch === '\t') { + do { + ch = text[i += 1]; + } while (ch && ch !== '\n'); + + 
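As an illustrative sketch (not part of the committed dist file): the Merge class above implements the << merge-key semantics described in its comment. Per the yaml 1.x docs this is switched on with the merge option and is only on by default for YAML 1.1 documents.

const YAML = require('yaml');

const src = [
  'defaults: &d',
  '  adapter: postgres',
  '  host: localhost',
  'development:',
  '  <<: *d',
  '  database: dev',
  ''
].join('\n');

console.log(YAML.parse(src, { merge: true }).development);
// { adapter: 'postgres', host: 'localhost', database: 'dev' }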
ch = text[i + 1]; + } + + return i; +}; +/** + * Tries to keep input at up to `lineWidth` characters, splitting only on spaces + * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are + * terminated with `\n` and started with `indent`. + * + * @param {string} text + * @param {string} indent + * @param {string} [mode='flow'] `'block'` prevents more-indented lines + * from being folded; `'quoted'` allows for `\` escapes, including escaped + * newlines + * @param {Object} options + * @param {number} [options.indentAtStart] Accounts for leading contents on + * the first line, defaulting to `indent.length` + * @param {number} [options.lineWidth=80] + * @param {number} [options.minContentWidth=20] Allow highly indented lines to + * stretch the line width or indent content from the start + * @param {function} options.onFold Called once if the text is folded + * @param {function} options.onFold Called once if any line of text exceeds + * lineWidth characters + */ + + +function foldFlowLines(text, indent, mode, { + indentAtStart, + lineWidth = 80, + minContentWidth = 20, + onFold, + onOverflow +}) { + if (!lineWidth || lineWidth < 0) return text; + const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); + if (text.length <= endStep) return text; + const folds = []; + const escapedFolds = {}; + let end = lineWidth - indent.length; + + if (typeof indentAtStart === 'number') { + if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) folds.push(0);else end = lineWidth - indentAtStart; + } + + let split = undefined; + let prev = undefined; + let overflow = false; + let i = -1; + let escStart = -1; + let escEnd = -1; + + if (mode === FOLD_BLOCK) { + i = consumeMoreIndentedLines(text, i); + if (i !== -1) end = i + endStep; + } + + for (let ch; ch = text[i += 1];) { + if (mode === FOLD_QUOTED && ch === '\\') { + escStart = i; + + switch (text[i + 1]) { + case 'x': + i += 3; + break; + + case 'u': + i += 5; + break; + + case 'U': + i += 9; + break; + + default: + i += 1; + } + + escEnd = i; + } + + if (ch === '\n') { + if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i); + end = i + endStep; + split = undefined; + } else { + if (ch === ' ' && prev && prev !== ' ' && prev !== '\n' && prev !== '\t') { + // space surrounded by non-space can be replaced with newline + indent + const next = text[i + 1]; + if (next && next !== ' ' && next !== '\n' && next !== '\t') split = i; + } + + if (i >= end) { + if (split) { + folds.push(split); + end = split + endStep; + split = undefined; + } else if (mode === FOLD_QUOTED) { + // white-space collected at end may stretch past lineWidth + while (prev === ' ' || prev === '\t') { + prev = ch; + ch = text[i += 1]; + overflow = true; + } // Account for newline escape, but don't break preceding escape + + + const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; // Bail out if lineWidth & minContentWidth are shorter than an escape string + + if (escapedFolds[j]) return text; + folds.push(j); + escapedFolds[j] = true; + end = j + endStep; + split = undefined; + } else { + overflow = true; + } + } + } + + prev = ch; + } + + if (overflow && onOverflow) onOverflow(); + if (folds.length === 0) return text; + if (onFold) onFold(); + let res = text.slice(0, folds[0]); + + for (let i = 0; i < folds.length; ++i) { + const fold = folds[i]; + const end = folds[i + 1] || text.length; + if (fold === 0) res = `\n${indent}${text.slice(0, end)}`;else { + if (mode === FOLD_QUOTED && escapedFolds[fold]) res += `${text[fold]}\\`; + res += `\n${indent}${text.slice(fold + 1, end)}`; + } + } + + return res; +} + +const getFoldOptions = ({ + indentAtStart +}) => indentAtStart ? Object.assign({ + indentAtStart +}, strOptions.fold) : strOptions.fold; // Also checks for lines starting with %, as parsing the output as YAML 1.1 will +// presume that's starting a new document. + + +const containsDocumentMarker = str => /^(%|---|\.\.\.)/m.test(str); + +function lineLengthOverLimit(str, lineWidth, indentLength) { + if (!lineWidth || lineWidth < 0) return false; + const limit = lineWidth - indentLength; + const strLen = str.length; + if (strLen <= limit) return false; + + for (let i = 0, start = 0; i < strLen; ++i) { + if (str[i] === '\n') { + if (i - start > limit) return true; + start = i + 1; + if (strLen - start <= limit) return false; + } + } + + return true; +} + +function doubleQuotedString(value, ctx) { + const { + implicitKey + } = ctx; + const { + jsonEncoding, + minMultiLineLength + } = strOptions.doubleQuoted; + const json = JSON.stringify(value); + if (jsonEncoding) return json; + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + let str = ''; + let start = 0; + + for (let i = 0, ch = json[i]; ch; ch = json[++i]) { + if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { + // space before newline needs to be escaped to not be folded + str += json.slice(start, i) + '\\ '; + i += 1; + start = i; + ch = '\\'; + } + + if (ch === '\\') switch (json[i + 1]) { + case 'u': + { + str += json.slice(start, i); + const code = json.substr(i + 2, 4); + + switch (code) { + case '0000': + str += '\\0'; + break; + + case '0007': + str += '\\a'; + break; + + case '000b': + str += '\\v'; + break; + + case '001b': + str += '\\e'; + break; + + case '0085': + str += '\\N'; + break; + + case '00a0': + str += '\\_'; + break; + + case '2028': + str += '\\L'; + break; + + case '2029': + str += '\\P'; + break; + + default: + if (code.substr(0, 2) === '00') str += '\\x' + code.substr(2);else str += json.substr(i, 6); + } + + i += 5; + start = i + 1; + } + break; + + case 'n': + if (implicitKey || json[i + 2] === '"' || json.length < minMultiLineLength) { + i += 1; + } else { + // folding will eat first newline + str += json.slice(start, i) + '\n\n'; + + while (json[i + 2] === '\\' && json[i + 3] === 'n' && json[i + 4] !== '"') { + str += '\n'; + i += 2; + } + + str += indent; // space after newline needs to be escaped to not be folded + + if (json[i + 2] === ' ') str += '\\'; + i += 1; + start = i + 1; + } + + break; + + default: + i += 1; + } + } + + str = start ? str + json.slice(start) : json; + return implicitKey ? 
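As an illustrative sketch (not part of the committed dist file): foldFlowLines above is what wraps long scalars during stringification, driven by strOptions.fold. The yaml 1.x docs expose those knobs as YAML.scalarOptions.str; treat the exact option path as an assumption if the bundled version differs.

const YAML = require('yaml');

const long = 'word '.repeat(40).trim();

YAML.scalarOptions.str.fold.lineWidth = 40;   // default is 80
console.log(YAML.stringify({ text: long }));  // value folded onto ~40-character lines

YAML.scalarOptions.str.fold.lineWidth = 0;    // 0 disables folding entirely
console.log(YAML.stringify({ text: long }));  // value kept on a single long line

YAML.scalarOptions.str.fold.lineWidth = 80;   // restore the default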
str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx)); +} + +function singleQuotedString(value, ctx) { + if (ctx.implicitKey) { + if (/\n/.test(value)) return doubleQuotedString(value, ctx); + } else { + // single quoted string can't have leading or trailing whitespace around newline + if (/[ \t]\n|\n[ \t]/.test(value)) return doubleQuotedString(value, ctx); + } + + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; + return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx)); +} + +function blockString({ + comment, + type, + value +}, ctx, onComment, onChompKeep) { + // 1. Block can't end in whitespace unless the last line is non-empty. + // 2. Strings consisting of only whitespace are best rendered explicitly. + if (/\n[\t ]+$/.test(value) || /^\s*$/.test(value)) { + return doubleQuotedString(value, ctx); + } + + const indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : ''); + const indentSize = indent ? '2' : '1'; // root is at -1 + + const literal = type === PlainValue.Type.BLOCK_FOLDED ? false : type === PlainValue.Type.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, strOptions.fold.lineWidth, indent.length); + let header = literal ? '|' : '>'; + if (!value) return header + '\n'; + let wsStart = ''; + let wsEnd = ''; + value = value.replace(/[\n\t ]*$/, ws => { + const n = ws.indexOf('\n'); + + if (n === -1) { + header += '-'; // strip + } else if (value === ws || n !== ws.length - 1) { + header += '+'; // keep + + if (onChompKeep) onChompKeep(); + } + + wsEnd = ws.replace(/\n$/, ''); + return ''; + }).replace(/^[\n ]*/, ws => { + if (ws.indexOf(' ') !== -1) header += indentSize; + const m = ws.match(/ +$/); + + if (m) { + wsStart = ws.slice(0, -m[0].length); + return m[0]; + } else { + wsStart = ws; + return ''; + } + }); + if (wsEnd) wsEnd = wsEnd.replace(/\n+(?!\n|$)/g, `$&${indent}`); + if (wsStart) wsStart = wsStart.replace(/\n+/g, `$&${indent}`); + + if (comment) { + header += ' #' + comment.replace(/ ?[\r\n]+/g, ' '); + if (onComment) onComment(); + } + + if (!value) return `${header}${indentSize}\n${indent}${wsEnd}`; + + if (literal) { + value = value.replace(/\n+/g, `$&${indent}`); + return `${header}\n${indent}${wsStart}${value}${wsEnd}`; + } + + value = value.replace(/\n+/g, '\n$&').replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded + // ^ ind.line ^ empty ^ capture next empty lines only at end of indent + .replace(/\n+/g, `$&${indent}`); + const body = foldFlowLines(`${wsStart}${value}${wsEnd}`, indent, FOLD_BLOCK, strOptions.fold); + return `${header}\n${indent}${body}`; +} + +function plainString(item, ctx, onComment, onChompKeep) { + const { + comment, + type, + value + } = item; + const { + actualString, + implicitKey, + indent, + inFlow + } = ctx; + + if (implicitKey && /[\n[\]{},]/.test(value) || inFlow && /[[\]{},]/.test(value)) { + return doubleQuotedString(value, ctx); + } + + if (!value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { + // not allowed: + // - empty string, '-' or '?' + // - start with an indicator character (except [?:-]) or /[?-] / + // - '\n ', ': ' or ' \n' anywhere + // - '#' not preceded by a non-space char + // - end with ' ' or ':' + return implicitKey || inFlow || value.indexOf('\n') === -1 ? value.indexOf('"') !== -1 && value.indexOf("'") === -1 ? 
singleQuotedString(value, ctx) : doubleQuotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep); + } + + if (!implicitKey && !inFlow && type !== PlainValue.Type.PLAIN && value.indexOf('\n') !== -1) { + // Where allowed & type not set explicitly, prefer block style for multiline strings + return blockString(item, ctx, onComment, onChompKeep); + } + + if (indent === '' && containsDocumentMarker(value)) { + ctx.forceBlockIndent = true; + return blockString(item, ctx, onComment, onChompKeep); + } + + const str = value.replace(/\n+/g, `$&\n${indent}`); // Verify that output will be parsed as a string, as e.g. plain numbers and + // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), + // and others in v1.1. + + if (actualString) { + const { + tags + } = ctx.doc.schema; + const resolved = resolveScalar(str, tags, tags.scalarFallback).value; + if (typeof resolved !== 'string') return doubleQuotedString(value, ctx); + } + + const body = implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx)); + + if (comment && !inFlow && (body.indexOf('\n') !== -1 || comment.indexOf('\n') !== -1)) { + if (onComment) onComment(); + return addCommentBefore(body, indent, comment); + } + + return body; +} + +function stringifyString(item, ctx, onComment, onChompKeep) { + const { + defaultType + } = strOptions; + const { + implicitKey, + inFlow + } = ctx; + let { + type, + value + } = item; + + if (typeof value !== 'string') { + value = String(value); + item = Object.assign({}, item, { + value + }); + } + + const _stringify = _type => { + switch (_type) { + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + return blockString(item, ctx, onComment, onChompKeep); + + case PlainValue.Type.QUOTE_DOUBLE: + return doubleQuotedString(value, ctx); + + case PlainValue.Type.QUOTE_SINGLE: + return singleQuotedString(value, ctx); + + case PlainValue.Type.PLAIN: + return plainString(item, ctx, onComment, onChompKeep); + + default: + return null; + } + }; + + if (type !== PlainValue.Type.QUOTE_DOUBLE && /[\x00-\x08\x0b-\x1f\x7f-\x9f]/.test(value)) { + // force double quotes on control characters + type = PlainValue.Type.QUOTE_DOUBLE; + } else if ((implicitKey || inFlow) && (type === PlainValue.Type.BLOCK_FOLDED || type === PlainValue.Type.BLOCK_LITERAL)) { + // should not happen; blocks are not valid inside flow containers + type = PlainValue.Type.QUOTE_DOUBLE; + } + + let res = _stringify(type); + + if (res === null) { + res = _stringify(defaultType); + if (res === null) throw new Error(`Unsupported default string type ${defaultType}`); + } + + return res; +} + +function stringifyNumber({ + format, + minFractionDigits, + tag, + value +}) { + if (typeof value === 'bigint') return String(value); + if (!isFinite(value)) return isNaN(value) ? '.nan' : value < 0 ? 
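As a rough sketch of the default behaviour (not part of the committed dist file): stringifyString above picks a scalar style from the value's content, falling back from plain to quoted or block styles; the exact output below assumes yaml@1.x defaults.

const YAML = require('yaml');

console.log(YAML.stringify({ plain: 'no special characters' }));
// plain: no special characters

console.log(YAML.stringify({ tricky: '#looks like a comment' }));
// tricky: "#looks like a comment"   (a leading '#' cannot stay plain)

console.log(YAML.stringify({ multi: 'first line\nsecond line' }));
// multi: |-
//   first line
//   second line                     (multiline values prefer block literals)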
'-.inf' : '.inf'; + let n = JSON.stringify(value); + + if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\d/.test(n)) { + let i = n.indexOf('.'); + + if (i < 0) { + i = n.length; + n += '.'; + } + + let d = minFractionDigits - (n.length - i - 1); + + while (d-- > 0) n += '0'; + } + + return n; +} + +function checkFlowCollectionEnd(errors, cst) { + let char, name; + + switch (cst.type) { + case PlainValue.Type.FLOW_MAP: + char = '}'; + name = 'flow map'; + break; + + case PlainValue.Type.FLOW_SEQ: + char = ']'; + name = 'flow sequence'; + break; + + default: + errors.push(new PlainValue.YAMLSemanticError(cst, 'Not a flow collection!?')); + return; + } + + let lastItem; + + for (let i = cst.items.length - 1; i >= 0; --i) { + const item = cst.items[i]; + + if (!item || item.type !== PlainValue.Type.COMMENT) { + lastItem = item; + break; + } + } + + if (lastItem && lastItem.char !== char) { + const msg = `Expected ${name} to end with ${char}`; + let err; + + if (typeof lastItem.offset === 'number') { + err = new PlainValue.YAMLSemanticError(cst, msg); + err.offset = lastItem.offset + 1; + } else { + err = new PlainValue.YAMLSemanticError(lastItem, msg); + if (lastItem.range && lastItem.range.end) err.offset = lastItem.range.end - lastItem.range.start; + } + + errors.push(err); + } +} +function checkFlowCommentSpace(errors, comment) { + const prev = comment.context.src[comment.range.start - 1]; + + if (prev !== '\n' && prev !== '\t' && prev !== ' ') { + const msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new PlainValue.YAMLSemanticError(comment, msg)); + } +} +function getLongKeyError(source, key) { + const sk = String(key); + const k = sk.substr(0, 8) + '...' + sk.substr(-8); + return new PlainValue.YAMLSemanticError(source, `The "${k}" key is too long`); +} +function resolveComments(collection, comments) { + for (const { + afterKey, + before, + comment + } of comments) { + let item = collection.items[before]; + + if (!item) { + if (comment !== undefined) { + if (collection.comment) collection.comment += '\n' + comment;else collection.comment = comment; + } + } else { + if (afterKey && item.value) item = item.value; + + if (comment === undefined) { + if (afterKey || !item.commentBefore) item.spaceBefore = true; + } else { + if (item.commentBefore) item.commentBefore += '\n' + comment;else item.commentBefore = comment; + } + } + } +} + +// on error, will return { str: string, errors: Error[] } +function resolveString(doc, node) { + const res = node.strValue; + if (!res) return ''; + if (typeof res === 'string') return res; + res.errors.forEach(error => { + if (!error.source) error.source = node; + doc.errors.push(error); + }); + return res.str; +} + +function resolveTagHandle(doc, node) { + const { + handle, + suffix + } = node.tag; + let prefix = doc.tagPrefixes.find(p => p.handle === handle); + + if (!prefix) { + const dtp = doc.getDefaults().tagPrefixes; + if (dtp) prefix = dtp.find(p => p.handle === handle); + if (!prefix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag handle is non-default and was not declared.`); + } + + if (!suffix) throw new PlainValue.YAMLSemanticError(node, `The ${handle} tag has no suffix.`); + + if (handle === '!' 
&& (doc.version || doc.options.version) === '1.0') { + if (suffix[0] === '^') { + doc.warnings.push(new PlainValue.YAMLWarning(node, 'YAML 1.0 ^ tag expansion is not supported')); + return suffix; + } + + if (/[:/]/.test(suffix)) { + // word/foo -> tag:word.yaml.org,2002:foo + const vocab = suffix.match(/^([a-z0-9-]+)\/(.*)/i); + return vocab ? `tag:${vocab[1]}.yaml.org,2002:${vocab[2]}` : `tag:${suffix}`; + } + } + + return prefix.prefix + decodeURIComponent(suffix); +} + +function resolveTagName(doc, node) { + const { + tag, + type + } = node; + let nonSpecific = false; + + if (tag) { + const { + handle, + suffix, + verbatim + } = tag; + + if (verbatim) { + if (verbatim !== '!' && verbatim !== '!!') return verbatim; + const msg = `Verbatim tags aren't resolved, so ${verbatim} is invalid.`; + doc.errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } else if (handle === '!' && !suffix) { + nonSpecific = true; + } else { + try { + return resolveTagHandle(doc, node); + } catch (error) { + doc.errors.push(error); + } + } + } + + switch (type) { + case PlainValue.Type.BLOCK_FOLDED: + case PlainValue.Type.BLOCK_LITERAL: + case PlainValue.Type.QUOTE_DOUBLE: + case PlainValue.Type.QUOTE_SINGLE: + return PlainValue.defaultTags.STR; + + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.MAP: + return PlainValue.defaultTags.MAP; + + case PlainValue.Type.FLOW_SEQ: + case PlainValue.Type.SEQ: + return PlainValue.defaultTags.SEQ; + + case PlainValue.Type.PLAIN: + return nonSpecific ? PlainValue.defaultTags.STR : null; + + default: + return null; + } +} + +function resolveByTagName(doc, node, tagName) { + const { + tags + } = doc.schema; + const matchWithTest = []; + + for (const tag of tags) { + if (tag.tag === tagName) { + if (tag.test) matchWithTest.push(tag);else { + const res = tag.resolve(doc, node); + return res instanceof Collection ? 
res : new Scalar(res); + } + } + } + + const str = resolveString(doc, node); + if (typeof str === 'string' && matchWithTest.length > 0) return resolveScalar(str, matchWithTest, tags.scalarFallback); + return null; +} + +function getFallbackTagName({ + type +}) { + switch (type) { + case PlainValue.Type.FLOW_MAP: + case PlainValue.Type.MAP: + return PlainValue.defaultTags.MAP; + + case PlainValue.Type.FLOW_SEQ: + case PlainValue.Type.SEQ: + return PlainValue.defaultTags.SEQ; + + default: + return PlainValue.defaultTags.STR; + } +} + +function resolveTag(doc, node, tagName) { + try { + const res = resolveByTagName(doc, node, tagName); + + if (res) { + if (tagName && node.tag) res.tag = tagName; + return res; + } + } catch (error) { + /* istanbul ignore if */ + if (!error.source) error.source = node; + doc.errors.push(error); + return null; + } + + try { + const fallback = getFallbackTagName(node); + if (!fallback) throw new Error(`The tag ${tagName} is unavailable`); + const msg = `The tag ${tagName} is unavailable, falling back to ${fallback}`; + doc.warnings.push(new PlainValue.YAMLWarning(node, msg)); + const res = resolveByTagName(doc, node, fallback); + res.tag = tagName; + return res; + } catch (error) { + const refError = new PlainValue.YAMLReferenceError(node, error.message); + refError.stack = error.stack; + doc.errors.push(refError); + return null; + } +} + +const isCollectionItem = node => { + if (!node) return false; + const { + type + } = node; + return type === PlainValue.Type.MAP_KEY || type === PlainValue.Type.MAP_VALUE || type === PlainValue.Type.SEQ_ITEM; +}; + +function resolveNodeProps(errors, node) { + const comments = { + before: [], + after: [] + }; + let hasAnchor = false; + let hasTag = false; + const props = isCollectionItem(node.context.parent) ? node.context.parent.props.concat(node.props) : node.props; + + for (const { + start, + end + } of props) { + switch (node.context.src[start]) { + case PlainValue.Char.COMMENT: + { + if (!node.commentHasRequiredWhitespace(start)) { + const msg = 'Comments must be separated from other tokens by white space characters'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + const { + header, + valueRange + } = node; + const cc = valueRange && (start > valueRange.start || header && start > header.start) ? 
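As an illustrative sketch (not part of the committed dist file): explicit tags are resolved by resolveTagName/resolveTag above, and unknown tags fall back to a default type with a warning instead of aborting the parse. A small sketch, assuming yaml@1.x defaults:

const YAML = require('yaml');

console.log(typeof YAML.parse('answer: !!str 42').answer); // 'string'

const doc = YAML.parseDocument('answer: !unknown 42');
console.log(doc.warnings[0] && doc.warnings[0].message);
// e.g. "The tag !unknown is unavailable, falling back to tag:yaml.org,2002:str"
console.log(doc.toJSON().answer); // '42' (resolved as a string)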
comments.after : comments.before; + cc.push(node.context.src.slice(start + 1, end)); + break; + } + // Actual anchor & tag resolution is handled by schema, here we just complain + + case PlainValue.Char.ANCHOR: + if (hasAnchor) { + const msg = 'A node can have at most one anchor'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + hasAnchor = true; + break; + + case PlainValue.Char.TAG: + if (hasTag) { + const msg = 'A node can have at most one tag'; + errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + hasTag = true; + break; + } + } + + return { + comments, + hasAnchor, + hasTag + }; +} + +function resolveNodeValue(doc, node) { + const { + anchors, + errors, + schema + } = doc; + + if (node.type === PlainValue.Type.ALIAS) { + const name = node.rawValue; + const src = anchors.getNode(name); + + if (!src) { + const msg = `Aliased anchor not found: ${name}`; + errors.push(new PlainValue.YAMLReferenceError(node, msg)); + return null; + } // Lazy resolution for circular references + + + const res = new Alias(src); + + anchors._cstAliases.push(res); + + return res; + } + + const tagName = resolveTagName(doc, node); + if (tagName) return resolveTag(doc, node, tagName); + + if (node.type !== PlainValue.Type.PLAIN) { + const msg = `Failed to resolve ${node.type} node here`; + errors.push(new PlainValue.YAMLSyntaxError(node, msg)); + return null; + } + + try { + const str = resolveString(doc, node); + return resolveScalar(str, schema.tags, schema.tags.scalarFallback); + } catch (error) { + if (!error.source) error.source = node; + errors.push(error); + return null; + } +} // sets node.resolved on success + + +function resolveNode(doc, node) { + if (!node) return null; + if (node.error) doc.errors.push(node.error); + const { + comments, + hasAnchor, + hasTag + } = resolveNodeProps(doc.errors, node); + + if (hasAnchor) { + const { + anchors + } = doc; + const name = node.anchor; + const prev = anchors.getNode(name); // At this point, aliases for any preceding node with the same anchor + // name have already been resolved, so it may safely be renamed. + + if (prev) anchors.map[anchors.newName(name)] = prev; // During parsing, we need to store the CST node in anchors.map as + // anchors need to be available during resolution to allow for + // circular references. + + anchors.map[name] = node; + } + + if (node.type === PlainValue.Type.ALIAS && (hasAnchor || hasTag)) { + const msg = 'An alias node must not specify any properties'; + doc.errors.push(new PlainValue.YAMLSemanticError(node, msg)); + } + + const res = resolveNodeValue(doc, node); + + if (res) { + res.range = [node.range.start, node.range.end]; + if (doc.options.keepCstNodes) res.cstNode = node; + if (doc.options.keepNodeTypes) res.type = node.type; + const cb = comments.before.join('\n'); + + if (cb) { + res.commentBefore = res.commentBefore ? `${res.commentBefore}\n${cb}` : cb; + } + + const ca = comments.after.join('\n'); + if (ca) res.comment = res.comment ? `${res.comment}\n${ca}` : ca; + } + + return node.resolved = res; +} + +function resolveMap(doc, cst) { + if (cst.type !== PlainValue.Type.MAP && cst.type !== PlainValue.Type.FLOW_MAP) { + const msg = `A ${cst.type} node cannot be resolved as a mapping`; + doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg)); + return null; + } + + const { + comments, + items + } = cst.type === PlainValue.Type.FLOW_MAP ? 
resolveFlowMapItems(doc, cst) : resolveBlockMapItems(doc, cst); + const map = new YAMLMap(); + map.items = items; + resolveComments(map, comments); + let hasCollectionKey = false; + + for (let i = 0; i < items.length; ++i) { + const { + key: iKey + } = items[i]; + if (iKey instanceof Collection) hasCollectionKey = true; + + if (doc.schema.merge && iKey && iKey.value === MERGE_KEY) { + items[i] = new Merge(items[i]); + const sources = items[i].value.items; + let error = null; + sources.some(node => { + if (node instanceof Alias) { + // During parsing, alias sources are CST nodes; to account for + // circular references their resolved values can't be used here. + const { + type + } = node.source; + if (type === PlainValue.Type.MAP || type === PlainValue.Type.FLOW_MAP) return false; + return error = 'Merge nodes aliases can only point to maps'; + } + + return error = 'Merge nodes can only have Alias nodes as values'; + }); + if (error) doc.errors.push(new PlainValue.YAMLSemanticError(cst, error)); + } else { + for (let j = i + 1; j < items.length; ++j) { + const { + key: jKey + } = items[j]; + + if (iKey === jKey || iKey && jKey && Object.prototype.hasOwnProperty.call(iKey, 'value') && iKey.value === jKey.value) { + const msg = `Map keys must be unique; "${iKey}" is repeated`; + doc.errors.push(new PlainValue.YAMLSemanticError(cst, msg)); + break; + } + } + } + } + + if (hasCollectionKey && !doc.options.mapAsMap) { + const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.'; + doc.warnings.push(new PlainValue.YAMLWarning(cst, warn)); + } + + cst.resolved = map; + return map; +} + +const valueHasPairComment = ({ + context: { + lineStart, + node, + src + }, + props +}) => { + if (props.length === 0) return false; + const { + start + } = props[0]; + if (node && start > node.valueRange.start) return false; + if (src[start] !== PlainValue.Char.COMMENT) return false; + + for (let i = lineStart; i < start; ++i) if (src[i] === '\n') return false; + + return true; +}; + +function resolvePairComment(item, pair) { + if (!valueHasPairComment(item)) return; + const comment = item.getPropValue(0, PlainValue.Char.COMMENT, true); + let found = false; + const cb = pair.value.commentBefore; + + if (cb && cb.startsWith(comment)) { + pair.value.commentBefore = cb.substr(comment.length + 1); + found = true; + } else { + const cc = pair.value.comment; + + if (!item.node && cc && cc.startsWith(comment)) { + pair.value.comment = cc.substr(comment.length + 1); + found = true; + } + } + + if (found) pair.comment = comment; +} + +function resolveBlockMapItems(doc, cst) { + const comments = []; + const items = []; + let key = undefined; + let keyStart = null; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + switch (item.type) { + case PlainValue.Type.BLANK_LINE: + comments.push({ + afterKey: !!key, + before: items.length + }); + break; + + case PlainValue.Type.COMMENT: + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + break; + + case PlainValue.Type.MAP_KEY: + if (key !== undefined) items.push(new Pair(key)); + if (item.error) doc.errors.push(item.error); + key = resolveNode(doc, item.node); + keyStart = null; + break; + + case PlainValue.Type.MAP_VALUE: + { + if (key === undefined) key = null; + if (item.error) doc.errors.push(item.error); + + if (!item.context.atLineStart && item.node && item.node.type === PlainValue.Type.MAP && !item.node.context.atLineStart) { + 
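As a minimal sketch (not part of the committed dist file): resolveMap above records repeated keys on the document's errors list rather than silently overwriting them, and per the yaml 1.x docs YAML.parse throws the first collected error.

const YAML = require('yaml');

const doc = YAML.parseDocument('a: 1\na: 2\n');
console.log(doc.errors[0].message); // Map keys must be unique; "a" is repeated

try {
  YAML.parse('a: 1\na: 2\n');
} catch (err) {
  console.log(err.name);            // YAMLSemanticError
}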
const msg = 'Nested mappings are not allowed in compact mappings'; + doc.errors.push(new PlainValue.YAMLSemanticError(item.node, msg)); + } + + let valueNode = item.node; + + if (!valueNode && item.props.length > 0) { + // Comments on an empty mapping value need to be preserved, so we + // need to construct a minimal empty node here to use instead of the + // missing `item.node`. -- eemeli/yaml#19 + valueNode = new PlainValue.PlainValue(PlainValue.Type.PLAIN, []); + valueNode.context = { + parent: item, + src: item.context.src + }; + const pos = item.range.start + 1; + valueNode.range = { + start: pos, + end: pos + }; + valueNode.valueRange = { + start: pos, + end: pos + }; + + if (typeof item.range.origStart === 'number') { + const origPos = item.range.origStart + 1; + valueNode.range.origStart = valueNode.range.origEnd = origPos; + valueNode.valueRange.origStart = valueNode.valueRange.origEnd = origPos; + } + } + + const pair = new Pair(key, resolveNode(doc, valueNode)); + resolvePairComment(item, pair); + items.push(pair); + + if (key && typeof keyStart === 'number') { + if (item.range.start > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + } + + key = undefined; + keyStart = null; + } + break; + + default: + if (key !== undefined) items.push(new Pair(key)); + key = resolveNode(doc, item); + keyStart = item.range.start; + if (item.error) doc.errors.push(item.error); + + next: for (let j = i + 1;; ++j) { + const nextItem = cst.items[j]; + + switch (nextItem && nextItem.type) { + case PlainValue.Type.BLANK_LINE: + case PlainValue.Type.COMMENT: + continue next; + + case PlainValue.Type.MAP_VALUE: + break next; + + default: + { + const msg = 'Implicit map keys need to be followed by map values'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + break next; + } + } + } + + if (item.valueRangeContainsNewline) { + const msg = 'Implicit map keys need to be on a single line'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + } + } + + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +function resolveFlowMapItems(doc, cst) { + const comments = []; + const items = []; + let key = undefined; + let explicitKey = false; + let next = '{'; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + if (typeof item.char === 'string') { + const { + char, + offset + } = item; + + if (char === '?' 
&& key === undefined && !explicitKey) { + explicitKey = true; + next = ':'; + continue; + } + + if (char === ':') { + if (key === undefined) key = null; + + if (next === ':') { + next = ','; + continue; + } + } else { + if (explicitKey) { + if (key === undefined && char !== ',') key = null; + explicitKey = false; + } + + if (key !== undefined) { + items.push(new Pair(key)); + key = undefined; + + if (char === ',') { + next = ':'; + continue; + } + } + } + + if (char === '}') { + if (i === cst.items.length - 1) continue; + } else if (char === next) { + next = ':'; + continue; + } + + const msg = `Flow map contains an unexpected ${char}`; + const err = new PlainValue.YAMLSyntaxError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } else if (item.type === PlainValue.Type.BLANK_LINE) { + comments.push({ + afterKey: !!key, + before: items.length + }); + } else if (item.type === PlainValue.Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + afterKey: !!key, + before: items.length, + comment: item.comment + }); + } else if (key === undefined) { + if (next === ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Separator , missing in flow map')); + key = resolveNode(doc, item); + } else { + if (next !== ',') doc.errors.push(new PlainValue.YAMLSemanticError(item, 'Indicator : missing in flow map entry')); + items.push(new Pair(key, resolveNode(doc, item))); + key = undefined; + explicitKey = false; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +function resolveSeq(doc, cst) { + if (cst.type !== PlainValue.Type.SEQ && cst.type !== PlainValue.Type.FLOW_SEQ) { + const msg = `A ${cst.type} node cannot be resolved as a sequence`; + doc.errors.push(new PlainValue.YAMLSyntaxError(cst, msg)); + return null; + } + + const { + comments, + items + } = cst.type === PlainValue.Type.FLOW_SEQ ? resolveFlowSeqItems(doc, cst) : resolveBlockSeqItems(doc, cst); + const seq = new YAMLSeq(); + seq.items = items; + resolveComments(seq, comments); + + if (!doc.options.mapAsMap && items.some(it => it instanceof Pair && it.key instanceof Collection)) { + const warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. 
Use mapAsMap: true to avoid this.'; + doc.warnings.push(new PlainValue.YAMLWarning(cst, warn)); + } + + cst.resolved = seq; + return seq; +} + +function resolveBlockSeqItems(doc, cst) { + const comments = []; + const items = []; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + switch (item.type) { + case PlainValue.Type.BLANK_LINE: + comments.push({ + before: items.length + }); + break; + + case PlainValue.Type.COMMENT: + comments.push({ + comment: item.comment, + before: items.length + }); + break; + + case PlainValue.Type.SEQ_ITEM: + if (item.error) doc.errors.push(item.error); + items.push(resolveNode(doc, item.node)); + + if (item.hasProps) { + const msg = 'Sequence items cannot have tags or anchors before the - indicator'; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + break; + + default: + if (item.error) doc.errors.push(item.error); + doc.errors.push(new PlainValue.YAMLSyntaxError(item, `Unexpected ${item.type} node in sequence`)); + } + } + + return { + comments, + items + }; +} + +function resolveFlowSeqItems(doc, cst) { + const comments = []; + const items = []; + let explicitKey = false; + let key = undefined; + let keyStart = null; + let next = '['; + let prevItem = null; + + for (let i = 0; i < cst.items.length; ++i) { + const item = cst.items[i]; + + if (typeof item.char === 'string') { + const { + char, + offset + } = item; + + if (char !== ':' && (explicitKey || key !== undefined)) { + if (explicitKey && key === undefined) key = next ? items.pop() : null; + items.push(new Pair(key)); + explicitKey = false; + key = undefined; + keyStart = null; + } + + if (char === next) { + next = null; + } else if (!next && char === '?') { + explicitKey = true; + } else if (next !== '[' && char === ':' && key === undefined) { + if (next === ',') { + key = items.pop(); + + if (key instanceof Pair) { + const msg = 'Chaining flow sequence pairs is invalid'; + const err = new PlainValue.YAMLSemanticError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } + + if (!explicitKey && typeof keyStart === 'number') { + const keyEnd = item.range ? 
item.range.start : item.offset; + if (keyEnd > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key)); + const { + src + } = prevItem.context; + + for (let i = keyStart; i < keyEnd; ++i) if (src[i] === '\n') { + const msg = 'Implicit keys of flow sequence pairs need to be on a single line'; + doc.errors.push(new PlainValue.YAMLSemanticError(prevItem, msg)); + break; + } + } + } else { + key = null; + } + + keyStart = null; + explicitKey = false; + next = null; + } else if (next === '[' || char !== ']' || i < cst.items.length - 1) { + const msg = `Flow sequence contains an unexpected ${char}`; + const err = new PlainValue.YAMLSyntaxError(cst, msg); + err.offset = offset; + doc.errors.push(err); + } + } else if (item.type === PlainValue.Type.BLANK_LINE) { + comments.push({ + before: items.length + }); + } else if (item.type === PlainValue.Type.COMMENT) { + checkFlowCommentSpace(doc.errors, item); + comments.push({ + comment: item.comment, + before: items.length + }); + } else { + if (next) { + const msg = `Expected a ${next} in flow sequence`; + doc.errors.push(new PlainValue.YAMLSemanticError(item, msg)); + } + + const value = resolveNode(doc, item); + + if (key === undefined) { + items.push(value); + prevItem = item; + } else { + items.push(new Pair(key, value)); + key = undefined; + } + + keyStart = item.range.start; + next = ','; + } + } + + checkFlowCollectionEnd(doc.errors, cst); + if (key !== undefined) items.push(new Pair(key)); + return { + comments, + items + }; +} + +exports.Alias = Alias; +exports.Collection = Collection; +exports.Merge = Merge; +exports.Node = Node; +exports.Pair = Pair; +exports.Scalar = Scalar; +exports.YAMLMap = YAMLMap; +exports.YAMLSeq = YAMLSeq; +exports.addComment = addComment; +exports.binaryOptions = binaryOptions; +exports.boolOptions = boolOptions; +exports.findPair = findPair; +exports.intOptions = intOptions; +exports.isEmptyPath = isEmptyPath; +exports.nullOptions = nullOptions; +exports.resolveMap = resolveMap; +exports.resolveNode = resolveNode; +exports.resolveSeq = resolveSeq; +exports.resolveString = resolveString; +exports.strOptions = strOptions; +exports.stringifyNumber = stringifyNumber; +exports.stringifyString = stringifyString; +exports.toJSON = toJSON; diff --git a/node_modules/yaml/dist/test-events.js b/node_modules/yaml/dist/test-events.js new file mode 100644 index 0000000..2b856e1 --- /dev/null +++ b/node_modules/yaml/dist/test-events.js @@ -0,0 +1,162 @@ +'use strict'; + +var parseCst = require('./parse-cst.js'); +var Document = require('./Document-9b4560a1.js'); +require('./PlainValue-ec8e588e.js'); +require('./resolveSeq-d03cb037.js'); +require('./Schema-88e323a7.js'); +require('./warnings-1000a372.js'); + +function testEvents(src, options) { + const opt = Object.assign({ + keepCstNodes: true, + keepNodeTypes: true, + version: '1.2' + }, options); + const docs = parseCst.parse(src).map(cstDoc => new Document.Document(opt).parse(cstDoc)); + const errDoc = docs.find(doc => doc.errors.length > 0); + const error = errDoc ? 
errDoc.errors[0].message : null; + const events = ['+STR']; + + try { + for (let i = 0; i < docs.length; ++i) { + const doc = docs[i]; + let root = doc.contents; + if (Array.isArray(root)) root = root[0]; + const [rootStart, rootEnd] = doc.range || [0, 0]; + let e = doc.errors[0] && doc.errors[0].source; + if (e && e.type === 'SEQ_ITEM') e = e.node; + if (e && (e.type === 'DOCUMENT' || e.range.start < rootStart)) throw new Error(); + let docStart = '+DOC'; + const pre = src.slice(0, rootStart); + const explicitDoc = /---\s*$/.test(pre); + if (explicitDoc) docStart += ' ---';else if (!doc.contents) continue; + events.push(docStart); + addEvents(events, doc, e, root); + if (doc.contents && doc.contents.length > 1) throw new Error(); + let docEnd = '-DOC'; + + if (rootEnd) { + const post = src.slice(rootEnd); + if (/^\.\.\./.test(post)) docEnd += ' ...'; + } + + events.push(docEnd); + } + } catch (e) { + return { + events, + error: error || e + }; + } + + events.push('-STR'); + return { + events, + error + }; +} + +function addEvents(events, doc, e, node) { + if (!node) { + events.push('=VAL :'); + return; + } + + if (e && node.cstNode === e) throw new Error(); + let props = ''; + let anchor = doc.anchors.getName(node); + + if (anchor) { + if (/\d$/.test(anchor)) { + const alt = anchor.replace(/\d$/, ''); + if (doc.anchors.getNode(alt)) anchor = alt; + } + + props = ` &${anchor}`; + } + + if (node.cstNode && node.cstNode.tag) { + const { + handle, + suffix + } = node.cstNode.tag; + props += handle === '!' && !suffix ? ' <!>' : ` <${node.tag}>`; + } + + let scalar = null; + + switch (node.type) { + case 'ALIAS': + { + let alias = doc.anchors.getName(node.source); + + if (/\d$/.test(alias)) { + const alt = alias.replace(/\d$/, ''); + if (doc.anchors.getNode(alt)) alias = alt; + } + + events.push(`=ALI${props} *${alias}`); + } + break; + + case 'BLOCK_FOLDED': + scalar = '>'; + break; + + case 'BLOCK_LITERAL': + scalar = '|'; + break; + + case 'PLAIN': + scalar = ':'; + break; + + case 'QUOTE_DOUBLE': + scalar = '"'; + break; + + case 'QUOTE_SINGLE': + scalar = "'"; + break; + + case 'PAIR': + events.push(`+MAP${props}`); + addEvents(events, doc, e, node.key); + addEvents(events, doc, e, node.value); + events.push('-MAP'); + break; + + case 'FLOW_SEQ': + case 'SEQ': + events.push(`+SEQ${props}`); + node.items.forEach(item => { + addEvents(events, doc, e, item); + }); + events.push('-SEQ'); + break; + + case 'FLOW_MAP': + case 'MAP': + events.push(`+MAP${props}`); + node.items.forEach(({ + key, + value + }) => { + addEvents(events, doc, e, key); + addEvents(events, doc, e, value); + }); + events.push('-MAP'); + break; + + default: + throw new Error(`Unexpected node type ${node.type}`); + } + + if (scalar) { + const value = node.cstNode.strValue.replace(/\\/g, '\\\\').replace(/\0/g, '\\0').replace(/\x07/g, '\\a').replace(/\x08/g, '\\b').replace(/\t/g, '\\t').replace(/\n/g, '\\n').replace(/\v/g, '\\v').replace(/\f/g, '\\f').replace(/\r/g, '\\r').replace(/\x1b/g, '\\e'); + events.push(`=VAL${props} ${scalar}${value}`); + } +} + +exports.testEvents = testEvents; diff --git a/node_modules/yaml/dist/types.js b/node_modules/yaml/dist/types.js new file mode 100644 index 0000000..967c04e --- /dev/null +++ b/node_modules/yaml/dist/types.js @@ -0,0 +1,23 @@ +'use strict'; + +var resolveSeq = require('./resolveSeq-d03cb037.js'); +var Schema = require('./Schema-88e323a7.js'); +require('./PlainValue-ec8e588e.js'); +require('./warnings-1000a372.js'); + + + +exports.Alias = resolveSeq.Alias; +exports.Collection 
= resolveSeq.Collection; +exports.Merge = resolveSeq.Merge; +exports.Node = resolveSeq.Node; +exports.Pair = resolveSeq.Pair; +exports.Scalar = resolveSeq.Scalar; +exports.YAMLMap = resolveSeq.YAMLMap; +exports.YAMLSeq = resolveSeq.YAMLSeq; +exports.binaryOptions = resolveSeq.binaryOptions; +exports.boolOptions = resolveSeq.boolOptions; +exports.intOptions = resolveSeq.intOptions; +exports.nullOptions = resolveSeq.nullOptions; +exports.strOptions = resolveSeq.strOptions; +exports.Schema = Schema.Schema; diff --git a/node_modules/yaml/dist/util.js b/node_modules/yaml/dist/util.js new file mode 100644 index 0000000..bc1e079 --- /dev/null +++ b/node_modules/yaml/dist/util.js @@ -0,0 +1,19 @@ +'use strict'; + +var resolveSeq = require('./resolveSeq-d03cb037.js'); +var PlainValue = require('./PlainValue-ec8e588e.js'); + + + +exports.findPair = resolveSeq.findPair; +exports.parseMap = resolveSeq.resolveMap; +exports.parseSeq = resolveSeq.resolveSeq; +exports.stringifyNumber = resolveSeq.stringifyNumber; +exports.stringifyString = resolveSeq.stringifyString; +exports.toJSON = resolveSeq.toJSON; +exports.Type = PlainValue.Type; +exports.YAMLError = PlainValue.YAMLError; +exports.YAMLReferenceError = PlainValue.YAMLReferenceError; +exports.YAMLSemanticError = PlainValue.YAMLSemanticError; +exports.YAMLSyntaxError = PlainValue.YAMLSyntaxError; +exports.YAMLWarning = PlainValue.YAMLWarning; diff --git a/node_modules/yaml/dist/warnings-1000a372.js b/node_modules/yaml/dist/warnings-1000a372.js new file mode 100644 index 0000000..7776d9f --- /dev/null +++ b/node_modules/yaml/dist/warnings-1000a372.js @@ -0,0 +1,416 @@ +'use strict'; + +var PlainValue = require('./PlainValue-ec8e588e.js'); +var resolveSeq = require('./resolveSeq-d03cb037.js'); + +/* global atob, btoa, Buffer */ +const binary = { + identify: value => value instanceof Uint8Array, + // Buffer inherits from Uint8Array + default: false, + tag: 'tag:yaml.org,2002:binary', + + /** + * Returns a Buffer in node and an Uint8Array in browsers + * + * To use the resulting buffer as an image, you'll want to do something like: + * + * const blob = new Blob([buffer], { type: 'image/jpeg' }) + * document.querySelector('#photo').src = URL.createObjectURL(blob) + */ + resolve: (doc, node) => { + const src = resolveSeq.resolveString(doc, node); + + if (typeof Buffer === 'function') { + return Buffer.from(src, 'base64'); + } else if (typeof atob === 'function') { + // On IE 11, atob() can't handle newlines + const str = atob(src.replace(/[\n\r]/g, '')); + const buffer = new Uint8Array(str.length); + + for (let i = 0; i < str.length; ++i) buffer[i] = str.charCodeAt(i); + + return buffer; + } else { + const msg = 'This environment does not support reading binary tags; either Buffer or atob is required'; + doc.errors.push(new PlainValue.YAMLReferenceError(node, msg)); + return null; + } + }, + options: resolveSeq.binaryOptions, + stringify: ({ + comment, + type, + value + }, ctx, onComment, onChompKeep) => { + let src; + + if (typeof Buffer === 'function') { + src = value instanceof Buffer ? 
value.toString('base64') : Buffer.from(value.buffer).toString('base64'); + } else if (typeof btoa === 'function') { + let s = ''; + + for (let i = 0; i < value.length; ++i) s += String.fromCharCode(value[i]); + + src = btoa(s); + } else { + throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); + } + + if (!type) type = resolveSeq.binaryOptions.defaultType; + + if (type === PlainValue.Type.QUOTE_DOUBLE) { + value = src; + } else { + const { + lineWidth + } = resolveSeq.binaryOptions; + const n = Math.ceil(src.length / lineWidth); + const lines = new Array(n); + + for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { + lines[i] = src.substr(o, lineWidth); + } + + value = lines.join(type === PlainValue.Type.BLOCK_LITERAL ? '\n' : ' '); + } + + return resolveSeq.stringifyString({ + comment, + type, + value + }, ctx, onComment, onChompKeep); + } +}; + +function parsePairs(doc, cst) { + const seq = resolveSeq.resolveSeq(doc, cst); + + for (let i = 0; i < seq.items.length; ++i) { + let item = seq.items[i]; + if (item instanceof resolveSeq.Pair) continue;else if (item instanceof resolveSeq.YAMLMap) { + if (item.items.length > 1) { + const msg = 'Each pair must have its own sequence indicator'; + throw new PlainValue.YAMLSemanticError(cst, msg); + } + + const pair = item.items[0] || new resolveSeq.Pair(); + if (item.commentBefore) pair.commentBefore = pair.commentBefore ? `${item.commentBefore}\n${pair.commentBefore}` : item.commentBefore; + if (item.comment) pair.comment = pair.comment ? `${item.comment}\n${pair.comment}` : item.comment; + item = pair; + } + seq.items[i] = item instanceof resolveSeq.Pair ? item : new resolveSeq.Pair(item); + } + + return seq; +} +function createPairs(schema, iterable, ctx) { + const pairs = new resolveSeq.YAMLSeq(schema); + pairs.tag = 'tag:yaml.org,2002:pairs'; + + for (const it of iterable) { + let key, value; + + if (Array.isArray(it)) { + if (it.length === 2) { + key = it[0]; + value = it[1]; + } else throw new TypeError(`Expected [key, value] tuple: ${it}`); + } else if (it && it instanceof Object) { + const keys = Object.keys(it); + + if (keys.length === 1) { + key = keys[0]; + value = it[key]; + } else throw new TypeError(`Expected { key: value } tuple: ${it}`); + } else { + key = it; + } + + const pair = schema.createPair(key, value, ctx); + pairs.items.push(pair); + } + + return pairs; +} +const pairs = { + default: false, + tag: 'tag:yaml.org,2002:pairs', + resolve: parsePairs, + createNode: createPairs +}; + +class YAMLOMap extends resolveSeq.YAMLSeq { + constructor() { + super(); + + PlainValue._defineProperty(this, "add", resolveSeq.YAMLMap.prototype.add.bind(this)); + + PlainValue._defineProperty(this, "delete", resolveSeq.YAMLMap.prototype.delete.bind(this)); + + PlainValue._defineProperty(this, "get", resolveSeq.YAMLMap.prototype.get.bind(this)); + + PlainValue._defineProperty(this, "has", resolveSeq.YAMLMap.prototype.has.bind(this)); + + PlainValue._defineProperty(this, "set", resolveSeq.YAMLMap.prototype.set.bind(this)); + + this.tag = YAMLOMap.tag; + } + + toJSON(_, ctx) { + const map = new Map(); + if (ctx && ctx.onCreate) ctx.onCreate(map); + + for (const pair of this.items) { + let key, value; + + if (pair instanceof resolveSeq.Pair) { + key = resolveSeq.toJSON(pair.key, '', ctx); + value = resolveSeq.toJSON(pair.value, key, ctx); + } else { + key = resolveSeq.toJSON(pair, '', ctx); + } + + if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys'); + map.set(key, 
value); + } + + return map; + } + +} + +PlainValue._defineProperty(YAMLOMap, "tag", 'tag:yaml.org,2002:omap'); + +function parseOMap(doc, cst) { + const pairs = parsePairs(doc, cst); + const seenKeys = []; + + for (const { + key + } of pairs.items) { + if (key instanceof resolveSeq.Scalar) { + if (seenKeys.includes(key.value)) { + const msg = 'Ordered maps must not include duplicate keys'; + throw new PlainValue.YAMLSemanticError(cst, msg); + } else { + seenKeys.push(key.value); + } + } + } + + return Object.assign(new YAMLOMap(), pairs); +} + +function createOMap(schema, iterable, ctx) { + const pairs = createPairs(schema, iterable, ctx); + const omap = new YAMLOMap(); + omap.items = pairs.items; + return omap; +} + +const omap = { + identify: value => value instanceof Map, + nodeClass: YAMLOMap, + default: false, + tag: 'tag:yaml.org,2002:omap', + resolve: parseOMap, + createNode: createOMap +}; + +class YAMLSet extends resolveSeq.YAMLMap { + constructor() { + super(); + this.tag = YAMLSet.tag; + } + + add(key) { + const pair = key instanceof resolveSeq.Pair ? key : new resolveSeq.Pair(key); + const prev = resolveSeq.findPair(this.items, pair.key); + if (!prev) this.items.push(pair); + } + + get(key, keepPair) { + const pair = resolveSeq.findPair(this.items, key); + return !keepPair && pair instanceof resolveSeq.Pair ? pair.key instanceof resolveSeq.Scalar ? pair.key.value : pair.key : pair; + } + + set(key, value) { + if (typeof value !== 'boolean') throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); + const prev = resolveSeq.findPair(this.items, key); + + if (prev && !value) { + this.items.splice(this.items.indexOf(prev), 1); + } else if (!prev && value) { + this.items.push(new resolveSeq.Pair(key)); + } + } + + toJSON(_, ctx) { + return super.toJSON(_, ctx, Set); + } + + toString(ctx, onComment, onChompKeep) { + if (!ctx) return JSON.stringify(this); + if (this.hasAllNullValues()) return super.toString(ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values'); + } + +} + +PlainValue._defineProperty(YAMLSet, "tag", 'tag:yaml.org,2002:set'); + +function parseSet(doc, cst) { + const map = resolveSeq.resolveMap(doc, cst); + if (!map.hasAllNullValues()) throw new PlainValue.YAMLSemanticError(cst, 'Set items must all have null values'); + return Object.assign(new YAMLSet(), map); +} + +function createSet(schema, iterable, ctx) { + const set = new YAMLSet(); + + for (const value of iterable) set.items.push(schema.createPair(value, null, ctx)); + + return set; +} + +const set = { + identify: value => value instanceof Set, + nodeClass: YAMLSet, + default: false, + tag: 'tag:yaml.org,2002:set', + resolve: parseSet, + createNode: createSet +}; + +const parseSexagesimal = (sign, parts) => { + const n = parts.split(':').reduce((n, p) => n * 60 + Number(p), 0); + return sign === '-' ? -n : n; +}; // hhhh:mm:ss.sss + + +const stringifySexagesimal = ({ + value +}) => { + if (isNaN(value) || !isFinite(value)) return resolveSeq.stringifyNumber(value); + let sign = ''; + + if (value < 0) { + sign = '-'; + value = Math.abs(value); + } + + const parts = [value % 60]; // seconds, including ms + + if (value < 60) { + parts.unshift(0); // at least one : is required + } else { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value % 60); // minutes + + if (value >= 60) { + value = Math.round((value - parts[0]) / 60); + parts.unshift(value); // hours + } + } + + return sign + parts.map(n => n < 10 ? 
'0' + String(n) : String(n)).join(':').replace(/000000\d*$/, '') // % 60 may introduce error + ; +}; + +const intTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/, + resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')), + stringify: stringifySexagesimal +}; +const floatTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'TIME', + test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*)$/, + resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')), + stringify: stringifySexagesimal +}; +const timestamp = { + identify: value => value instanceof Date, + default: true, + tag: 'tag:yaml.org,2002:timestamp', + // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part + // may be omitted altogether, resulting in a date format. In such a case, the time part is + // assumed to be 00:00:00Z (start of day, UTC). + test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd + '(?:(?:t|T|[ \\t]+)' + // t | T | whitespace + '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? + '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 + ')?' + ')$'), + resolve: (str, year, month, day, hour, minute, second, millisec, tz) => { + if (millisec) millisec = (millisec + '00').substr(1, 3); + let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0); + + if (tz && tz !== 'Z') { + let d = parseSexagesimal(tz[0], tz.slice(1)); + if (Math.abs(d) < 30) d *= 60; + date -= 60000 * d; + } + + return new Date(date); + }, + stringify: ({ + value + }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') +}; + +/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */ +function shouldWarn(deprecation) { + const env = typeof process !== 'undefined' && process.env || {}; + + if (deprecation) { + if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS; + return !env.YAML_SILENCE_DEPRECATION_WARNINGS; + } + + if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS; + return !env.YAML_SILENCE_WARNINGS; +} + +function warn(warning, type) { + if (shouldWarn(false)) { + const emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to + // https://github.com/facebook/jest/issues/2549 + + if (emit) emit(warning, type);else { + // eslint-disable-next-line no-console + console.warn(type ? `${type}: ${warning}` : warning); + } + } +} +function warnFileDeprecation(filename) { + if (shouldWarn(true)) { + const path = filename.replace(/.*yaml[/\\]/i, '').replace(/\.js$/, '').replace(/\\/g, '/'); + warn(`The endpoint 'yaml/${path}' will be removed in a future release.`, 'DeprecationWarning'); + } +} +const warned = {}; +function warnOptionDeprecation(name, alternative) { + if (!warned[name] && shouldWarn(true)) { + warned[name] = true; + let msg = `The option '${name}' will be removed in a future release`; + msg += alternative ? 
`, use '${alternative}' instead.` : '.'; + warn(msg, 'DeprecationWarning'); + } +} + +exports.binary = binary; +exports.floatTime = floatTime; +exports.intTime = intTime; +exports.omap = omap; +exports.pairs = pairs; +exports.set = set; +exports.timestamp = timestamp; +exports.warn = warn; +exports.warnFileDeprecation = warnFileDeprecation; +exports.warnOptionDeprecation = warnOptionDeprecation; diff --git a/node_modules/yaml/index.d.ts b/node_modules/yaml/index.d.ts new file mode 100644 index 0000000..53eb011 --- /dev/null +++ b/node_modules/yaml/index.d.ts @@ -0,0 +1,372 @@ +import { CST } from './parse-cst' +import { + AST, + Alias, + Collection, + Merge, + Node, + Scalar, + Schema, + YAMLMap, + YAMLSeq +} from './types' +import { Type, YAMLError, YAMLWarning } from './util' + +export { AST, CST } +export { default as parseCST } from './parse-cst' + +/** + * `yaml` defines document-specific options in three places: as an argument of + * parse, create and stringify calls, in the values of `YAML.defaultOptions`, + * and in the version-dependent `YAML.Document.defaults` object. Values set in + * `YAML.defaultOptions` override version-dependent defaults, and argument + * options override both. + */ +export const defaultOptions: Options + +export interface Options extends Schema.Options { + /** + * Default prefix for anchors. + * + * Default: `'a'`, resulting in anchors `a1`, `a2`, etc. + */ + anchorPrefix?: string + /** + * The number of spaces to use when indenting code. + * + * Default: `2` + */ + indent?: number + /** + * Whether block sequences should be indented. + * + * Default: `true` + */ + indentSeq?: boolean + /** + * Allow non-JSON JavaScript objects to remain in the `toJSON` output. + * Relevant with the YAML 1.1 `!!timestamp` and `!!binary` tags as well as BigInts. + * + * Default: `true` + */ + keepBlobsInJSON?: boolean + /** + * Include references in the AST to each node's corresponding CST node. + * + * Default: `false` + */ + keepCstNodes?: boolean + /** + * Store the original node type when parsing documents. + * + * Default: `true` + */ + keepNodeTypes?: boolean + /** + * When outputting JS, use Map rather than Object to represent mappings. + * + * Default: `false` + */ + mapAsMap?: boolean + /** + * Prevent exponential entity expansion attacks by limiting data aliasing count; + * set to `-1` to disable checks; `0` disallows all alias nodes. + * + * Default: `100` + */ + maxAliasCount?: number + /** + * Include line position & node type directly in errors; drop their verbose source and context. + * + * Default: `false` + */ + prettyErrors?: boolean + /** + * When stringifying, require keys to be scalars and to use implicit rather than explicit notation. + * + * Default: `false` + */ + simpleKeys?: boolean + /** + * The YAML version used by documents without a `%YAML` directive. + * + * Default: `"1.2"` + */ + version?: '1.0' | '1.1' | '1.2' +} + +/** + * Some customization options are availabe to control the parsing and + * stringification of scalars. Note that these values are used by all documents. + */ +export const scalarOptions: { + binary: scalarOptions.Binary + bool: scalarOptions.Bool + int: scalarOptions.Int + null: scalarOptions.Null + str: scalarOptions.Str +} +export namespace scalarOptions { + interface Binary { + /** + * The type of string literal used to stringify `!!binary` values. + * + * Default: `'BLOCK_LITERAL'` + */ + defaultType: Scalar.Type + /** + * Maximum line width for `!!binary`. 
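Editor's note: the Options interface and scalarOptions declarations being added above describe the library's document-level options and its global scalar-formatting knobs. A minimal usage sketch follows (illustrative only, not part of the vendored files; it assumes the package resolves as require('yaml'), per the index.js added in this commit, and the expected outputs are inferred from these typings rather than verified):

  const YAML = require('yaml')

  // Per-call options override YAML.defaultOptions, which in turn overrides
  // the version-dependent defaults described above.
  YAML.defaultOptions.prettyErrors = true
  YAML.parse('a: 1', { mapAsMap: true })    // expected: Map { 'a' => 1 }

  // scalarOptions values are global and affect all documents.
  YAML.scalarOptions.null.nullStr = '~'
  YAML.stringify({ a: null })               // expected: 'a: ~\n'
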
+ * + * Default: `76` + */ + lineWidth: number + } + + interface Bool { + /** + * String representation for `true`. With the core schema, use `'true' | 'True' | 'TRUE'`. + * + * Default: `'true'` + */ + trueStr: string + /** + * String representation for `false`. With the core schema, use `'false' | 'False' | 'FALSE'`. + * + * Default: `'false'` + */ + falseStr: string + } + + interface Int { + /** + * Whether integers should be parsed into BigInt values. + * + * Default: `false` + */ + asBigInt: boolean + } + + interface Null { + /** + * String representation for `null`. With the core schema, use `'null' | 'Null' | 'NULL' | '~' | ''`. + * + * Default: `'null'` + */ + nullStr: string + } + + interface Str { + /** + * The default type of string literal used to stringify values + * + * Default: `'PLAIN'` + */ + defaultType: Scalar.Type + doubleQuoted: { + /** + * Whether to restrict double-quoted strings to use JSON-compatible syntax. + * + * Default: `false` + */ + jsonEncoding: boolean + /** + * Minimum length to use multiple lines to represent the value. + * + * Default: `40` + */ + minMultiLineLength: number + } + fold: { + /** + * Maximum line width (set to `0` to disable folding). + * + * Default: `80` + */ + lineWidth: number + /** + * Minimum width for highly-indented content. + * + * Default: `20` + */ + minContentWidth: number + } + } +} + +export class Document extends Collection { + cstNode?: CST.Document + constructor(options?: Options) + tag: never + directivesEndMarker?: boolean + type: Type.DOCUMENT + /** + * Anchors associated with the document's nodes; + * also provides alias & merge node creators. + */ + anchors: Document.Anchors + /** The document contents. */ + contents: any + /** Errors encountered during parsing. */ + errors: YAMLError[] + /** + * The schema used with the document. Use `setSchema()` to change or + * initialise. + */ + schema?: Schema + /** + * Array of prefixes; each will have a string `handle` that + * starts and ends with `!` and a string `prefix` that the handle will be replaced by. + */ + tagPrefixes: Document.TagPrefix[] + /** + * The parsed version of the source document; + * if true-ish, stringified output will include a `%YAML` directive. + */ + version?: string + /** Warnings encountered during parsing. */ + warnings: YAMLWarning[] + /** + * List the tags used in the document that are not in the default + * `tag:yaml.org,2002:` namespace. + */ + listNonDefaultTags(): string[] + /** Parse a CST into this document */ + parse(cst: CST.Document): this + /** + * When a document is created with `new YAML.Document()`, the schema object is + * not set as it may be influenced by parsed directives; call this with no + * arguments to set it manually, or with arguments to change the schema used + * by the document. + **/ + setSchema( + id?: Options['version'] | Schema.Name, + customTags?: (Schema.TagId | Schema.Tag)[] + ): void + /** Set `handle` as a shorthand string for the `prefix` tag namespace. */ + setTagPrefix(handle: string, prefix: string): void + /** + * A plain JavaScript representation of the document `contents`. + * + * @param arg Used by `JSON.stringify` to indicate the array index or property + * name. If its value is a `string` and the document `contents` has a scalar + * value, the `keepBlobsInJSON` option has no effect. + * @param onAnchor If defined, called with the resolved `value` and reference + * `count` for each anchor in the document. 
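Editor's note: the Document class declared above exposes the parsed contents, collected errors and the anchors registry. A short sketch of the corresponding runtime calls (an assumption based on these typings, not tested against this vendored build):

  const YAML = require('yaml')

  const doc = YAML.parseDocument('base: &b\n  x: 1\nderived: *b\n')
  doc.errors                 // [] when parsing succeeded
  doc.anchors.getNames()     // expected: ['b']
  doc.toJSON()               // expected: { base: { x: 1 }, derived: { x: 1 } }
  String(doc)                // re-stringifies the document, anchors included
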
+ * */ + toJSON(arg?: string, onAnchor?: (value: any, count: number) => void): any + /** A YAML representation of the document. */ + toString(): string +} + +export namespace Document { + interface Parsed extends Document { + contents: Scalar | YAMLMap | YAMLSeq | null + /** The schema used with the document. */ + schema: Schema + } + + interface Anchors { + /** + * Create a new `Alias` node, adding the required anchor for `node`. + * If `name` is empty, a new anchor name will be generated. + */ + createAlias(node: Node, name?: string): Alias + /** + * Create a new `Merge` node with the given source nodes. + * Non-`Alias` sources will be automatically wrapped. + */ + createMergePair(...nodes: Node[]): Merge + /** The anchor name associated with `node`, if set. */ + getName(node: Node): undefined | string + /** List of all defined anchor names. */ + getNames(): string[] + /** The node associated with the anchor `name`, if set. */ + getNode(name: string): undefined | Node + /** + * Find an available anchor name with the given `prefix` and a + * numerical suffix. + */ + newName(prefix: string): string + /** + * Associate an anchor with `node`. If `name` is empty, a new name will be generated. + * To remove an anchor, use `setAnchor(null, name)`. + */ + setAnchor(node: Node | null, name?: string): void | string + } + + interface TagPrefix { + handle: string + prefix: string + } +} + +/** + * Recursively turns objects into collections. Generic objects as well as `Map` + * and its descendants become mappings, while arrays and other iterable objects + * result in sequences. + * + * The primary purpose of this function is to enable attaching comments or other + * metadata to a value, or to otherwise exert more fine-grained control over the + * stringified output. To that end, you'll need to assign its return value to + * the `contents` of a Document (or somewhere within said contents), as the + * document's schema is required for YAML string output. + * + * @param wrapScalars If undefined or `true`, also wraps plain values in + * `Scalar` objects; if `false` and `value` is not an object, it will be + * returned directly. + * @param tag Use to specify the collection type, e.g. `"!!omap"`. Note that + * this requires the corresponding tag to be available based on the default + * options. To use a specific document's schema, use `doc.schema.createNode`. + */ +export function createNode( + value: any, + wrapScalars?: true, + tag?: string +): YAMLMap | YAMLSeq | Scalar + +/** + * YAML.createNode recursively turns objects into Map and arrays to Seq collections. + * Its primary use is to enable attaching comments or other metadata to a value, + * or to otherwise exert more fine-grained control over the stringified output. + * + * Doesn't wrap plain values in Scalar objects. + */ +export function createNode( + value: any, + wrapScalars: false, + tag?: string +): YAMLMap | YAMLSeq | string | number | boolean | null + +/** + * Parse an input string into a single YAML.Document. + */ +export function parseDocument(str: string, options?: Options): Document.Parsed + +/** + * Parse the input as a stream of YAML documents. + * + * Documents should be separated from each other by `...` or `---` marker lines. + */ +export function parseAllDocuments( + str: string, + options?: Options +): Document.Parsed[] + +/** + * Parse an input string into JavaScript. + * + * Only supports input consisting of a single YAML document; for multi-document + * support you should use `YAML.parseAllDocuments`. 
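Editor's note: parseDocument, parseAllDocuments, parse and stringify declared here are the top-level entry points. A brief usage sketch (illustrative only; outputs are expected values, not verified here):

  const YAML = require('yaml')

  YAML.parse('a: 1\nb: [2, 3]\n')        // expected: { a: 1, b: [ 2, 3 ] }
  YAML.stringify({ a: 1, b: [2, 3] })    // expected: 'a: 1\nb:\n  - 2\n  - 3\n'

  // Streams containing more than one document need parseAllDocuments:
  YAML.parseAllDocuments('a: 1\n---\nb: 2\n').map(d => d.toJSON())
  // expected: [ { a: 1 }, { b: 2 } ]
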
May throw on error, and may + * log warnings using `console.warn`. + * + * @param str A string with YAML formatting. + * @returns The value will match the type of the root value of the parsed YAML + * document, so Maps become objects, Sequences arrays, and scalars result in + * nulls, booleans, numbers and strings. + */ +export function parse(str: string, options?: Options): any + +/** + * @returns Will always include \n as the last character, as is expected of YAML documents. + */ +export function stringify(value: any, options?: Options): string diff --git a/node_modules/yaml/index.js b/node_modules/yaml/index.js new file mode 100644 index 0000000..b501ac4 --- /dev/null +++ b/node_modules/yaml/index.js @@ -0,0 +1 @@ +module.exports = require('./dist').YAML diff --git a/node_modules/yaml/map.js b/node_modules/yaml/map.js new file mode 100644 index 0000000..78f2ebc --- /dev/null +++ b/node_modules/yaml/map.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').YAMLMap +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/package.json b/node_modules/yaml/package.json new file mode 100644 index 0000000..d91b0dd --- /dev/null +++ b/node_modules/yaml/package.json @@ -0,0 +1,106 @@ +{ + "name": "yaml", + "version": "1.10.2", + "license": "ISC", + "author": "Eemeli Aro <eemeli@gmail.com>", + "repository": "github:eemeli/yaml", + "description": "JavaScript parser and stringifier for YAML", + "keywords": [ + "YAML", + "parser", + "stringifier" + ], + "homepage": "https://eemeli.org/yaml/v1/", + "files": [ + "browser/", + "dist/", + "types/", + "*.d.ts", + "*.js", + "*.mjs", + "!*config.js" + ], + "type": "commonjs", + "main": "./index.js", + "browser": { + "./index.js": "./browser/index.js", + "./map.js": "./browser/map.js", + "./pair.js": "./browser/pair.js", + "./parse-cst.js": "./browser/parse-cst.js", + "./scalar.js": "./browser/scalar.js", + "./schema.js": "./browser/schema.js", + "./seq.js": "./browser/seq.js", + "./types.js": "./browser/types.js", + "./types.mjs": "./browser/types.js", + "./types/binary.js": "./browser/types/binary.js", + "./types/omap.js": "./browser/types/omap.js", + "./types/pairs.js": "./browser/types/pairs.js", + "./types/set.js": "./browser/types/set.js", + "./types/timestamp.js": "./browser/types/timestamp.js", + "./util.js": "./browser/util.js", + "./util.mjs": "./browser/util.js" + }, + "exports": { + ".": "./index.js", + "./parse-cst": "./parse-cst.js", + "./types": [ + { + "import": "./types.mjs" + }, + "./types.js" + ], + "./util": [ + { + "import": "./util.mjs" + }, + "./util.js" + ], + "./": "./" + }, + "scripts": { + "build": "npm run build:node && npm run build:browser", + "build:browser": "rollup -c rollup.browser-config.js", + "build:node": "rollup -c rollup.node-config.js", + "clean": "git clean -fdxe node_modules", + "lint": "eslint src/", + "prettier": "prettier --write .", + "start": "cross-env TRACE_LEVEL=log npm run build:node && node -i -e 'YAML=require(\".\")'", + "test": "jest", + "test:browsers": "cd playground && npm test", + "test:dist": "npm run build:node && jest", + "test:types": "tsc --lib ES2017 --noEmit tests/typings.ts", + "docs:install": "cd docs-slate && bundle install", + "docs:deploy": "cd docs-slate && ./deploy.sh", + "docs": "cd docs-slate && bundle exec middleman server", + "preversion": "npm test && npm run build", + "prepublishOnly": "npm run clean && npm test && npm run build" + }, + "browserslist": "> 0.5%, not dead", + "prettier": { + "arrowParens": "avoid", + "semi": false, + 
"singleQuote": true, + "trailingComma": "none" + }, + "devDependencies": { + "@babel/core": "^7.12.10", + "@babel/plugin-proposal-class-properties": "^7.12.1", + "@babel/preset-env": "^7.12.11", + "@rollup/plugin-babel": "^5.2.3", + "babel-eslint": "^10.1.0", + "babel-jest": "^26.6.3", + "babel-plugin-trace": "^1.1.0", + "common-tags": "^1.8.0", + "cross-env": "^7.0.3", + "eslint": "^7.19.0", + "eslint-config-prettier": "^7.2.0", + "fast-check": "^2.12.0", + "jest": "^26.6.3", + "prettier": "^2.2.1", + "rollup": "^2.38.2", + "typescript": "^4.1.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/yaml/pair.js b/node_modules/yaml/pair.js new file mode 100644 index 0000000..b2880a2 --- /dev/null +++ b/node_modules/yaml/pair.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').Pair +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/parse-cst.d.ts b/node_modules/yaml/parse-cst.d.ts new file mode 100644 index 0000000..9fd2ebf --- /dev/null +++ b/node_modules/yaml/parse-cst.d.ts @@ -0,0 +1,191 @@ +import { Type, YAMLSyntaxError } from './util' + +export default function parseCST(str: string): ParsedCST + +export interface ParsedCST extends Array<CST.Document> { + setOrigRanges(): boolean +} + +export namespace CST { + interface Range { + start: number + end: number + origStart?: number + origEnd?: number + isEmpty(): boolean + } + + interface ParseContext { + /** Node starts at beginning of line */ + atLineStart: boolean + /** true if currently in a collection context */ + inCollection: boolean + /** true if currently in a flow context */ + inFlow: boolean + /** Current level of indentation */ + indent: number + /** Start of the current line */ + lineStart: number + /** The parent of the node */ + parent: Node + /** Source of the YAML document */ + src: string + } + + interface Node { + context: ParseContext | null + /** if not null, indicates a parser failure */ + error: YAMLSyntaxError | null + /** span of context.src parsed into this node */ + range: Range | null + valueRange: Range | null + /** anchors, tags and comments */ + props: Range[] + /** specific node type */ + type: Type + /** if non-null, overrides source value */ + value: string | null + + readonly anchor: string | null + readonly comment: string | null + readonly hasComment: boolean + readonly hasProps: boolean + readonly jsonLike: boolean + readonly rangeAsLinePos: null | { + start: { line: number; col: number } + end?: { line: number; col: number } + } + readonly rawValue: string | null + readonly tag: + | null + | { verbatim: string } + | { handle: string; suffix: string } + readonly valueRangeContainsNewline: boolean + } + + interface Alias extends Node { + type: Type.ALIAS + /** contain the anchor without the * prefix */ + readonly rawValue: string + } + + type Scalar = BlockValue | PlainValue | QuoteValue + + interface BlockValue extends Node { + type: Type.BLOCK_FOLDED | Type.BLOCK_LITERAL + chomping: 'CLIP' | 'KEEP' | 'STRIP' + blockIndent: number | null + header: Range + readonly strValue: string | null + } + + interface BlockFolded extends BlockValue { + type: Type.BLOCK_FOLDED + } + + interface BlockLiteral extends BlockValue { + type: Type.BLOCK_LITERAL + } + + interface PlainValue extends Node { + type: Type.PLAIN + readonly strValue: string | null + } + + interface QuoteValue extends Node { + type: Type.QUOTE_DOUBLE | Type.QUOTE_SINGLE + readonly strValue: + | null + | string + | { str: string; errors: YAMLSyntaxError[] } + } + + interface 
QuoteDouble extends QuoteValue { + type: Type.QUOTE_DOUBLE + } + + interface QuoteSingle extends QuoteValue { + type: Type.QUOTE_SINGLE + } + + interface Comment extends Node { + type: Type.COMMENT + readonly anchor: null + readonly comment: string + readonly rawValue: null + readonly tag: null + } + + interface BlankLine extends Node { + type: Type.BLANK_LINE + } + + interface MapItem extends Node { + type: Type.MAP_KEY | Type.MAP_VALUE + node: ContentNode | null + } + + interface MapKey extends MapItem { + type: Type.MAP_KEY + } + + interface MapValue extends MapItem { + type: Type.MAP_VALUE + } + + interface Map extends Node { + type: Type.MAP + /** implicit keys are not wrapped */ + items: Array<BlankLine | Comment | Alias | Scalar | MapItem> + } + + interface SeqItem extends Node { + type: Type.SEQ_ITEM + node: ContentNode | null + } + + interface Seq extends Node { + type: Type.SEQ + items: Array<BlankLine | Comment | SeqItem> + } + + interface FlowChar { + char: '{' | '}' | '[' | ']' | ',' | '?' | ':' + offset: number + origOffset?: number + } + + interface FlowCollection extends Node { + type: Type.FLOW_MAP | Type.FLOW_SEQ + items: Array< + FlowChar | BlankLine | Comment | Alias | Scalar | FlowCollection + > + } + + interface FlowMap extends FlowCollection { + type: Type.FLOW_MAP + } + + interface FlowSeq extends FlowCollection { + type: Type.FLOW_SEQ + } + + type ContentNode = Alias | Scalar | Map | Seq | FlowCollection + + interface Directive extends Node { + type: Type.DIRECTIVE + name: string + readonly anchor: null + readonly parameters: string[] + readonly tag: null + } + + interface Document extends Node { + type: Type.DOCUMENT + directives: Array<BlankLine | Comment | Directive> + contents: Array<BlankLine | Comment | ContentNode> + readonly anchor: null + readonly comment: null + readonly tag: null + } +} diff --git a/node_modules/yaml/parse-cst.js b/node_modules/yaml/parse-cst.js new file mode 100644 index 0000000..8065756 --- /dev/null +++ b/node_modules/yaml/parse-cst.js @@ -0,0 +1 @@ +module.exports = require('./dist/parse-cst').parse diff --git a/node_modules/yaml/scalar.js b/node_modules/yaml/scalar.js new file mode 100644 index 0000000..deee1b0 --- /dev/null +++ b/node_modules/yaml/scalar.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').Scalar +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/schema.js b/node_modules/yaml/schema.js new file mode 100644 index 0000000..4139c4b --- /dev/null +++ b/node_modules/yaml/schema.js @@ -0,0 +1,9 @@ +const types = require('./dist/types') +const util = require('./dist/util') + +module.exports = types.Schema +module.exports.nullOptions = types.nullOptions +module.exports.strOptions = types.strOptions +module.exports.stringify = util.stringifyString + +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/seq.js b/node_modules/yaml/seq.js new file mode 100644 index 0000000..66147df --- /dev/null +++ b/node_modules/yaml/seq.js @@ -0,0 +1,2 @@ +module.exports = require('./dist/types').YAMLSeq +require('./dist/legacy-exports').warnFileDeprecation(__filename) diff --git a/node_modules/yaml/types.d.ts b/node_modules/yaml/types.d.ts new file mode 100644 index 0000000..411e74e --- /dev/null +++ b/node_modules/yaml/types.d.ts @@ -0,0 +1,407 @@ +import { Document, scalarOptions } from './index' +import { CST } from './parse-cst' +import { Type } from './util' + +export const binaryOptions: scalarOptions.Binary +export const boolOptions: 
scalarOptions.Bool +export const intOptions: scalarOptions.Int +export const nullOptions: scalarOptions.Null +export const strOptions: scalarOptions.Str + +export class Schema { + /** Default: `'tag:yaml.org,2002:'` */ + static defaultPrefix: string + static defaultTags: { + /** Default: `'tag:yaml.org,2002:map'` */ + MAP: string + /** Default: `'tag:yaml.org,2002:seq'` */ + SEQ: string + /** Default: `'tag:yaml.org,2002:str'` */ + STR: string + } + constructor(options: Schema.Options) + /** + * Convert any value into a `Node` using this schema, recursively turning + * objects into collections. + * + * @param wrapScalars If `true`, also wraps plain values in `Scalar` objects; + * if undefined or `false` and `value` is not an object, it will be returned + * directly. + * @param tag Use to specify the collection type, e.g. `"!!omap"`. Note that + * this requires the corresponding tag to be available in this schema. + */ + createNode( + value: any, + wrapScalars?: boolean, + tag?: string, + ctx?: Schema.CreateNodeContext + ): Node + /** + * Convert a key and a value into a `Pair` using this schema, recursively + * wrapping all values as `Scalar` or `Collection` nodes. + * + * @param ctx To not wrap scalars, use a context `{ wrapScalars: false }` + */ + createPair(key: any, value: any, ctx?: Schema.CreateNodeContext): Pair + merge: boolean + name: Schema.Name + sortMapEntries: ((a: Pair, b: Pair) => number) | null + tags: Schema.Tag[] +} + +export namespace Schema { + type Name = 'core' | 'failsafe' | 'json' | 'yaml-1.1' + + interface Options { + /** + * Array of additional tags to include in the schema, or a function that may + * modify the schema's base tag array. + */ + customTags?: (TagId | Tag)[] | ((tags: Tag[]) => Tag[]) + /** + * Enable support for `<<` merge keys. + * + * Default: `false` for YAML 1.2, `true` for earlier versions + */ + merge?: boolean + /** + * The base schema to use. + * + * Default: `"core"` for YAML 1.2, `"yaml-1.1"` for earlier versions + */ + schema?: Name + /** + * When stringifying, sort map entries. If `true`, sort by comparing key values with `<`. + * + * Default: `false` + */ + sortMapEntries?: boolean | ((a: Pair, b: Pair) => number) + /** + * @deprecated Use `customTags` instead. + */ + tags?: Options['customTags'] + } + + interface CreateNodeContext { + wrapScalars?: boolean + [key: string]: any + } + + interface StringifyContext { + forceBlockIndent?: boolean + implicitKey?: boolean + indent?: string + indentAtStart?: number + inFlow?: boolean + [key: string]: any + } + + type TagId = + | 'binary' + | 'bool' + | 'float' + | 'floatExp' + | 'floatNaN' + | 'floatTime' + | 'int' + | 'intHex' + | 'intOct' + | 'intTime' + | 'null' + | 'omap' + | 'pairs' + | 'set' + | 'timestamp' + + type Tag = CustomTag | DefaultTag + + interface BaseTag { + /** + * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes. + */ + createNode?: ( + schema: Schema, + value: any, + ctx: Schema.CreateNodeContext + ) => YAMLMap | YAMLSeq | Scalar + /** + * If a tag has multiple forms that should be parsed and/or stringified differently, use `format` to identify them. + */ + format?: string + /** + * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or + * `instanceof`. + */ + identify(value: any): boolean + /** + * The `Node` child class that implements this tag. Required for collections and tags that have overlapping JS representations. 
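Editor's note: Schema.Options above controls which tags a document understands. A sketch of enabling optional built-in tags and merge keys through these options (assumed behaviour inferred from the declarations and the resolveMap merge handling earlier in this diff, not tested here):

  const YAML = require('yaml')

  // TagId strings such as 'timestamp' or 'set' enable optional built-in tags.
  YAML.parse('2001-12-14', { customTags: ['timestamp'] })  // expected: a Date instance

  // merge: true enables '<<' merge keys on the YAML 1.2 core schema.
  YAML.parse('base: &b { x: 1 }\nderived: { <<: *b, y: 2 }\n', { merge: true })
  // expected: { base: { x: 1 }, derived: { x: 1, y: 2 } }
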
+ */ + nodeClass?: new () => any + /** + * Used by some tags to configure their stringification, where applicable. + */ + options?: object + /** + * Optional function stringifying the AST node in the current context. If your + * data includes a suitable `.toString()` method, you can probably leave this + * undefined and use the default stringifier. + * + * @param item The node being stringified. + * @param ctx Contains the stringifying context variables. + * @param onComment Callback to signal that the stringifier includes the + * item's comment in its output. + * @param onChompKeep Callback to signal that the output uses a block scalar + * type with the `+` chomping indicator. + */ + stringify?: ( + item: Node, + ctx: Schema.StringifyContext, + onComment?: () => void, + onChompKeep?: () => void + ) => string + /** + * The identifier for your data type, with which its stringified form will be + * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified + * `tag:domain,date:foo`. + */ + tag: string + } + + interface CustomTag extends BaseTag { + /** + * A JavaScript class that should be matched to this tag, e.g. `Date` for `!!timestamp`. + * @deprecated Use `Tag.identify` instead + */ + class?: new () => any + /** + * Turns a CST node into an AST node. If returning a non-`Node` value, the + * output will be wrapped as a `Scalar`. + */ + resolve(doc: Document, cstNode: CST.Node): Node | any + } + + interface DefaultTag extends BaseTag { + /** + * If `true`, together with `test` allows for values to be stringified without + * an explicit tag. For most cases, it's unlikely that you'll actually want to + * use this, even if you first think you do. + */ + default: true + /** + * Alternative form used by default tags; called with `test` match results. + */ + resolve(...match: string[]): Node | any + /** + * Together with `default` allows for values to be stringified without an + * explicit tag and detected using a regular expression. For most cases, it's + * unlikely that you'll actually want to use these, even if you first think + * you do. + */ + test: RegExp + } +} + +export class Node { + /** A comment on or immediately after this */ + comment?: string | null + /** A comment before this */ + commentBefore?: string | null + /** Only available when `keepCstNodes` is set to `true` */ + cstNode?: CST.Node + /** + * The [start, end] range of characters of the source parsed + * into this node (undefined for pairs or if not parsed) + */ + range?: [number, number] | null + /** A blank line before this node and its commentBefore */ + spaceBefore?: boolean + /** A fully qualified tag, if required */ + tag?: string + /** A plain JS representation of this node */ + toJSON(arg?: any): any + /** The type of this node */ + type?: Type | Pair.Type +} + +export class Scalar extends Node { + constructor(value: any) + type?: Scalar.Type + /** + * By default (undefined), numbers use decimal notation. + * The YAML 1.2 core schema only supports 'HEX' and 'OCT'. 
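Editor's note: the Tag interfaces declared here (identify, tag, resolve, stringify) describe what a customTags entry must implement. Below is a hypothetical '!re' tag for RegExp values, sketched from these declarations only; the tag name, regex and expected result are the editor's illustration, not part of the library:

  const YAML = require('yaml')

  const regexpTag = {
    identify: value => value instanceof RegExp,
    tag: '!re',
    // CustomTag.resolve receives the document and the CST node; for a plain
    // scalar, cstNode.strValue holds the raw text (e.g. '/fo+/i').
    resolve: (doc, cstNode) => {
      const m = /^\/(.*)\/([a-z]*)$/.exec(cstNode.strValue || '')
      return m ? new RegExp(m[1], m[2]) : null
    }
  }

  YAML.parse('!re /fo+/i', { customTags: [regexpTag] })  // expected: /fo+/i
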
+ */ + format?: 'BIN' | 'HEX' | 'OCT' | 'TIME' + value: any + toJSON(arg?: any, ctx?: AST.NodeToJsonContext): any + toString(): string +} +export namespace Scalar { + type Type = + | Type.BLOCK_FOLDED + | Type.BLOCK_LITERAL + | Type.PLAIN + | Type.QUOTE_DOUBLE + | Type.QUOTE_SINGLE +} + +export class Alias extends Node { + type: Type.ALIAS + source: Node + cstNode?: CST.Alias + toString(ctx: Schema.StringifyContext): string +} + +export class Pair extends Node { + constructor(key: any, value?: any) + type: Pair.Type.PAIR | Pair.Type.MERGE_PAIR + /** Always Node or null when parsed, but can be set to anything. */ + key: any + /** Always Node or null when parsed, but can be set to anything. */ + value: any + cstNode?: never // no corresponding cstNode + toJSON(arg?: any, ctx?: AST.NodeToJsonContext): object | Map<any, any> + toString( + ctx?: Schema.StringifyContext, + onComment?: () => void, + onChompKeep?: () => void + ): string +} +export namespace Pair { + enum Type { + PAIR = 'PAIR', + MERGE_PAIR = 'MERGE_PAIR' + } +} + +export class Merge extends Pair { + type: Pair.Type.MERGE_PAIR + /** Always Scalar('<<'), defined by the type specification */ + key: AST.PlainValue + /** Always YAMLSeq<Alias(Map)>, stringified as *A if length = 1 */ + value: YAMLSeq + toString(ctx?: Schema.StringifyContext, onComment?: () => void): string +} + +export class Collection extends Node { + type?: Type.MAP | Type.FLOW_MAP | Type.SEQ | Type.FLOW_SEQ | Type.DOCUMENT + items: any[] + schema?: Schema + + /** + * Adds a value to the collection. For `!!map` and `!!omap` the value must + * be a Pair instance or a `{ key, value }` object, which may not have a key + * that already exists in the map. + */ + add(value: any): void + addIn(path: Iterable<any>, value: any): void + /** + * Removes a value from the collection. + * @returns `true` if the item was found and removed. + */ + delete(key: any): boolean + deleteIn(path: Iterable<any>): boolean + /** + * Returns item at `key`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + get(key: any, keepScalar?: boolean): any + getIn(path: Iterable<any>, keepScalar?: boolean): any + /** + * Checks if the collection includes a value with the key `key`. + */ + has(key: any): boolean + hasIn(path: Iterable<any>): boolean + /** + * Sets a value in this collection. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. 
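Editor's note: Collection's get/set/has/delete accessors (and the *In path variants continued just below) let callers read and edit parsed nodes without unwrapping the whole document. A short sketch of typical use on a parsed map (expected results inferred from these declarations):

  const YAML = require('yaml')

  const doc = YAML.parseDocument('a:\n  b: [1, 2]\n')
  const map = doc.contents          // a YAMLMap for this input
  map.getIn(['a', 'b', 0])          // expected: 1
  map.getIn(['a', 'b'])             // the YAMLSeq node (collections are returned intact)
  map.setIn(['a', 'c'], 3)          // adds c: 3 under a
  map.hasIn(['x'])                  // expected: false
  String(doc)                       // expected: 'a:\n  b: [ 1, 2 ]\n  c: 3\n'
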
+ */ + set(key: any, value: any): void + setIn(path: Iterable<any>, value: any): void +} + +export class YAMLMap extends Collection { + type?: Type.FLOW_MAP | Type.MAP + items: Array<Pair> + hasAllNullValues(): boolean + toJSON(arg?: any, ctx?: AST.NodeToJsonContext): object | Map<any, any> + toString( + ctx?: Schema.StringifyContext, + onComment?: () => void, + onChompKeep?: () => void + ): string +} + +export class YAMLSeq extends Collection { + type?: Type.FLOW_SEQ | Type.SEQ + delete(key: number | string | Scalar): boolean + get(key: number | string | Scalar, keepScalar?: boolean): any + has(key: number | string | Scalar): boolean + set(key: number | string | Scalar, value: any): void + hasAllNullValues(): boolean + toJSON(arg?: any, ctx?: AST.NodeToJsonContext): any[] + toString( + ctx?: Schema.StringifyContext, + onComment?: () => void, + onChompKeep?: () => void + ): string +} + +export namespace AST { + interface NodeToJsonContext { + anchors?: any[] + doc: Document + keep?: boolean + mapAsMap?: boolean + maxAliasCount?: number + onCreate?: (node: Node) => void + [key: string]: any + } + + interface BlockFolded extends Scalar { + type: Type.BLOCK_FOLDED + cstNode?: CST.BlockFolded + } + + interface BlockLiteral extends Scalar { + type: Type.BLOCK_LITERAL + cstNode?: CST.BlockLiteral + } + + interface PlainValue extends Scalar { + type: Type.PLAIN + cstNode?: CST.PlainValue + } + + interface QuoteDouble extends Scalar { + type: Type.QUOTE_DOUBLE + cstNode?: CST.QuoteDouble + } + + interface QuoteSingle extends Scalar { + type: Type.QUOTE_SINGLE + cstNode?: CST.QuoteSingle + } + + interface FlowMap extends YAMLMap { + type: Type.FLOW_MAP + cstNode?: CST.FlowMap + } + + interface BlockMap extends YAMLMap { + type: Type.MAP + cstNode?: CST.Map + } + + interface FlowSeq extends YAMLSeq { + type: Type.FLOW_SEQ + items: Array<Node> + cstNode?: CST.FlowSeq + } + + interface BlockSeq extends YAMLSeq { + type: Type.SEQ + items: Array<Node | null> + cstNode?: CST.Seq + } +} diff --git a/node_modules/yaml/types.js b/node_modules/yaml/types.js new file mode 100644 index 0000000..8874749 --- /dev/null +++ b/node_modules/yaml/types.js @@ -0,0 +1,17 @@ +const types = require('./dist/types') + +exports.binaryOptions = types.binaryOptions +exports.boolOptions = types.boolOptions +exports.intOptions = types.intOptions +exports.nullOptions = types.nullOptions +exports.strOptions = types.strOptions + +exports.Schema = types.Schema +exports.Alias = types.Alias +exports.Collection = types.Collection +exports.Merge = types.Merge +exports.Node = types.Node +exports.Pair = types.Pair +exports.Scalar = types.Scalar +exports.YAMLMap = types.YAMLMap +exports.YAMLSeq = types.YAMLSeq diff --git a/node_modules/yaml/types.mjs b/node_modules/yaml/types.mjs new file mode 100644 index 0000000..7132288 --- /dev/null +++ b/node_modules/yaml/types.mjs @@ -0,0 +1,17 @@ +import types from './dist/types.js' + +export const binaryOptions = types.binaryOptions +export const boolOptions = types.boolOptions +export const intOptions = types.intOptions +export const nullOptions = types.nullOptions +export const strOptions = types.strOptions + +export const Schema = types.Schema +export const Alias = types.Alias +export const Collection = types.Collection +export const Merge = types.Merge +export const Node = types.Node +export const Pair = types.Pair +export const Scalar = types.Scalar +export const YAMLMap = types.YAMLMap +export const YAMLSeq = types.YAMLSeq diff --git a/node_modules/yaml/types/binary.js 
b/node_modules/yaml/types/binary.js new file mode 100644 index 0000000..271b9de --- /dev/null +++ b/node_modules/yaml/types/binary.js @@ -0,0 +1,8 @@ +'use strict' +Object.defineProperty(exports, '__esModule', { value: true }) + +const legacy = require('../dist/legacy-exports') +exports.binary = legacy.binary +exports.default = [exports.binary] + +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/types/omap.js b/node_modules/yaml/types/omap.js new file mode 100644 index 0000000..37b638b --- /dev/null +++ b/node_modules/yaml/types/omap.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.omap +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/types/pairs.js b/node_modules/yaml/types/pairs.js new file mode 100644 index 0000000..f1df201 --- /dev/null +++ b/node_modules/yaml/types/pairs.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.pairs +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/types/set.js b/node_modules/yaml/types/set.js new file mode 100644 index 0000000..e7dc9d4 --- /dev/null +++ b/node_modules/yaml/types/set.js @@ -0,0 +1,3 @@ +const legacy = require('../dist/legacy-exports') +module.exports = legacy.set +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/types/timestamp.js b/node_modules/yaml/types/timestamp.js new file mode 100644 index 0000000..39c5b6d --- /dev/null +++ b/node_modules/yaml/types/timestamp.js @@ -0,0 +1,10 @@ +'use strict' +Object.defineProperty(exports, '__esModule', { value: true }) + +const legacy = require('../dist/legacy-exports') +exports.default = [legacy.intTime, legacy.floatTime, legacy.timestamp] +exports.floatTime = legacy.floatTime +exports.intTime = legacy.intTime +exports.timestamp = legacy.timestamp + +legacy.warnFileDeprecation(__filename) diff --git a/node_modules/yaml/util.d.ts b/node_modules/yaml/util.d.ts new file mode 100644 index 0000000..b135541 --- /dev/null +++ b/node_modules/yaml/util.d.ts @@ -0,0 +1,86 @@ +import { Document } from './index' +import { CST } from './parse-cst' +import { AST, Pair, Scalar, Schema } from './types' + +export function findPair(items: any[], key: Scalar | any): Pair | undefined + +export function parseMap(doc: Document, cst: CST.Map): AST.BlockMap +export function parseMap(doc: Document, cst: CST.FlowMap): AST.FlowMap +export function parseSeq(doc: Document, cst: CST.Seq): AST.BlockSeq +export function parseSeq(doc: Document, cst: CST.FlowSeq): AST.FlowSeq + +export function stringifyNumber(item: Scalar): string +export function stringifyString( + item: Scalar, + ctx: Schema.StringifyContext, + onComment?: () => void, + onChompKeep?: () => void +): string + +export function toJSON( + value: any, + arg?: any, + ctx?: Schema.CreateNodeContext +): any + +export enum Type { + ALIAS = 'ALIAS', + BLANK_LINE = 'BLANK_LINE', + BLOCK_FOLDED = 'BLOCK_FOLDED', + BLOCK_LITERAL = 'BLOCK_LITERAL', + COMMENT = 'COMMENT', + DIRECTIVE = 'DIRECTIVE', + DOCUMENT = 'DOCUMENT', + FLOW_MAP = 'FLOW_MAP', + FLOW_SEQ = 'FLOW_SEQ', + MAP = 'MAP', + MAP_KEY = 'MAP_KEY', + MAP_VALUE = 'MAP_VALUE', + PLAIN = 'PLAIN', + QUOTE_DOUBLE = 'QUOTE_DOUBLE', + QUOTE_SINGLE = 'QUOTE_SINGLE', + SEQ = 'SEQ', + SEQ_ITEM = 'SEQ_ITEM' +} + +interface LinePos { + line: number + col: number +} + +export class YAMLError extends Error { + name: + | 'YAMLReferenceError' + | 'YAMLSemanticError' + | 'YAMLSyntaxError' + | 'YAMLWarning' + message: string + source?: CST.Node + + nodeType?: Type 
+ range?: CST.Range + linePos?: { start: LinePos; end: LinePos } + + /** + * Drops `source` and adds `nodeType`, `range` and `linePos`, as well as + * adding details to `message`. Run automatically for document errors if + * the `prettyErrors` option is set. + */ + makePretty(): void +} + +export class YAMLReferenceError extends YAMLError { + name: 'YAMLReferenceError' +} + +export class YAMLSemanticError extends YAMLError { + name: 'YAMLSemanticError' +} + +export class YAMLSyntaxError extends YAMLError { + name: 'YAMLSyntaxError' +} + +export class YAMLWarning extends YAMLError { + name: 'YAMLWarning' +} diff --git a/node_modules/yaml/util.js b/node_modules/yaml/util.js new file mode 100644 index 0000000..06dd2c9 --- /dev/null +++ b/node_modules/yaml/util.js @@ -0,0 +1,16 @@ +const util = require('./dist/util') + +exports.findPair = util.findPair +exports.toJSON = util.toJSON +exports.parseMap = util.parseMap +exports.parseSeq = util.parseSeq + +exports.stringifyNumber = util.stringifyNumber +exports.stringifyString = util.stringifyString +exports.Type = util.Type + +exports.YAMLError = util.YAMLError +exports.YAMLReferenceError = util.YAMLReferenceError +exports.YAMLSemanticError = util.YAMLSemanticError +exports.YAMLSyntaxError = util.YAMLSyntaxError +exports.YAMLWarning = util.YAMLWarning diff --git a/node_modules/yaml/util.mjs b/node_modules/yaml/util.mjs new file mode 100644 index 0000000..89e654a --- /dev/null +++ b/node_modules/yaml/util.mjs @@ -0,0 +1,18 @@ +import util from './dist/util.js' + +export const findPair = util.findPair +export const toJSON = util.toJSON + +export const parseMap = util.parseMap +export const parseSeq = util.parseSeq + +export const stringifyNumber = util.stringifyNumber +export const stringifyString = util.stringifyString + +export const Type = util.Type + +export const YAMLError = util.YAMLError +export const YAMLReferenceError = util.YAMLReferenceError +export const YAMLSemanticError = util.YAMLSemanticError +export const YAMLSyntaxError = util.YAMLSyntaxError +export const YAMLWarning = util.YAMLWarning diff --git a/package-lock.json b/package-lock.json index a58a412..f389014 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,7 +5,20 @@ "packages": { "": { "dependencies": { - "markdown-it": "^12.3.2" + "ejs": "^3.1.6", + "markdown-it": "^12.3.2", + "yaml": "^1.10.2" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" } }, "node_modules/argparse": { @@ -13,6 +26,70 @@ "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "node_modules/async": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": 
"sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/ejs": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", + "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", + "dependencies": { + "jake": "^10.6.1" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/entities": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", @@ -21,6 +98,47 @@ "url": "https://github.com/fb55/entities?sponsor=1" } }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/filelist": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", + "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "dependencies": { + "minimatch": "^3.0.4" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "engines": { + "node": ">=4" + } + }, + "node_modules/jake": { + "version": "10.8.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", + "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "dependencies": { + "async": "0.9.x", + "chalk": "^2.4.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": "*" + } + }, "node_modules/linkify-it": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", @@ -49,23 +167,145 @@ "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + 
"engines": { + "node": "*" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/uc.micro": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + }, + "node_modules/yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==", + "engines": { + "node": ">= 6" + } } }, "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "requires": { + "color-convert": "^1.9.0" + } + }, "argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, + "async": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "requires": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + } + }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "ejs": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.6.tgz", + "integrity": "sha512-9lt9Zse4hPucPkoP7FHDF0LQAlGyF9JVpnClFLFH3aSSbxmyoqINRpp/9wePWJTUl4KOQwRL72Iw3InHPDkoGw==", + "requires": { + "jake": "^10.6.1" + } + }, "entities": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", "integrity": 
"sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==" }, + "escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=" + }, + "filelist": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.2.tgz", + "integrity": "sha512-z7O0IS8Plc39rTCq6i6iHxk43duYOn8uFJiWSewIq0Bww1RNybVHSCjahmcC87ZqAm4OTvFzlzeGu3XAzG1ctQ==", + "requires": { + "minimatch": "^3.0.4" + } + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=" + }, + "jake": { + "version": "10.8.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.8.2.tgz", + "integrity": "sha512-eLpKyrfG3mzvGE2Du8VoPbeSkRry093+tyNjdYaBbJS9v17knImYGNXQCUV0gLxQtF82m3E8iRb/wdSQZLoq7A==", + "requires": { + "async": "0.9.x", + "chalk": "^2.4.2", + "filelist": "^1.0.1", + "minimatch": "^3.0.4" + } + }, "linkify-it": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", @@ -91,10 +331,31 @@ "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=" }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "requires": { + "has-flag": "^3.0.0" + } + }, "uc.micro": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" + }, + "yaml": { + "version": "1.10.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz", + "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==" } } } diff --git a/package.json b/package.json index a59d0ea..9056f06 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,7 @@ { "dependencies": { - "markdown-it": "^12.3.2" + "ejs": "^3.1.6", + "markdown-it": "^12.3.2", + "yaml": "^1.10.2" } } diff --git a/templates/footer.ejs b/templates/footer.ejs new file mode 100644 index 0000000..f9ac4de --- /dev/null +++ b/templates/footer.ejs @@ -0,0 +1,9 @@ + </main> + <footer class="<%- config.classes.footer.join(" ") %>"> + <hr> + <p style="text-align:center;"> + <i>generated by <a href="https://gitlab.minteck.org/minteck/cobalt">Cobalt</a> v<%- config._version %></i> + </p> + </footer> +</body> +</html>
\ No newline at end of file diff --git a/templates/footer.html b/templates/footer.html deleted file mode 100644 index e656a2a..0000000 --- a/templates/footer.html +++ /dev/null @@ -1,4 +0,0 @@ - </main> - <!-- TODO: put your copyright, footer, etc... here --> -</body> -</html>
\ No newline at end of file diff --git a/templates/header.ejs b/templates/header.ejs new file mode 100644 index 0000000..2a2b565 --- /dev/null +++ b/templates/header.ejs @@ -0,0 +1,13 @@ +<!DOCTYPE html> +<html lang="en" class="<%- config.classes.html.join(" ") %>"> +<head> +    <meta charset="UTF-8"> +    <meta http-equiv="X-UA-Compatible" content="IE=edge"> +    <meta name="viewport" content="width=device-width, initial-scale=1.0"> +    <title><%- name %> | <%- config.site_name %></title> +    <link rel="stylesheet" href="<%- config.imports.css.join('"><link rel="stylesheet" href="') %>"> +    <script src="<%- config.imports.js.join('</script><script src="') %>"></script> +</head> +<body class="<%- config.classes.body.join(" ") %>"> +    <!-- TODO: put your navigation, header, etc... here --> +    <main class="<%- config.classes.main.join(" ") %>">
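For orientation, a minimal sketch (not part of this commit) of how header.ejs and footer.ejs might be rendered with the ejs dependency added in package.json. The config object mirrors the keys the templates read (classes, imports, _version); site_name, the sample page name, and the renderPage helper are assumptions for illustration only.

import * as fs from 'fs'
import * as ejs from 'ejs'
import * as YAML from 'yaml'

// Assumed setup: config.yml parsed into a plain object, with _version and
// site_name supplied by the caller (site_name is an assumption, not a shipped key).
const config = {
  ...YAML.parse(fs.readFileSync('./config.yml', 'utf8')),
  _version: '1.2',
  site_name: 'My website'
}

// Hypothetical helper: wraps an already-rendered Markdown body with the two templates.
// ejs.renderFile returns a Promise when no callback is given (ejs 3.x).
async function renderPage(name: string, bodyHtml: string): Promise<string> {
  const header = await ejs.renderFile('./templates/header.ejs', { config, name })
  const footer = await ejs.renderFile('./templates/footer.ejs', { config })
  return header + bodyHtml + footer
}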
\ No newline at end of file diff --git a/templates/header.html b/templates/header.html deleted file mode 100644 index f61e243..0000000 --- a/templates/header.html +++ /dev/null @@ -1,11 +0,0 @@ -<!DOCTYPE html> -<html lang="en"> -<head> - <meta charset="UTF-8"> - <meta http-equiv="X-UA-Compatible" content="IE=edge"> - <meta name="viewport" content="width=device-width, initial-scale=1.0"> - <title>$TITLE$ | My website</title> -</head> -<body> - <!-- TODO: put your navigation, header, etc... here --> - <main>
\ No newline at end of file
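To make the collection typings above concrete, here is a small usage sketch (not from this commit) of yaml@1.10's document API together with the Collection accessors declared in node_modules/yaml/types.d.ts; the sample YAML string and keys are illustrative only.

import * as YAML from 'yaml'
import { YAMLMap } from 'yaml/types'

// Parse into a Document so the node tree (YAMLMap, YAMLSeq, Scalar) is preserved.
const doc = YAML.parseDocument('classes:\n  body: [dark]\nconditional_publishing: false\n')
const root = doc.contents as YAMLMap

// get/getIn unwrap Scalar values by default; pass keepScalar to receive the node itself.
root.getIn(['classes', 'body', 0])        // 'dark'
root.getIn(['classes', 'body', 0], true)  // Scalar node wrapping 'dark'

// set accepts plain JS values: a Pair's key and value "can be set to anything".
root.set('conditional_publishing', true)

// Stringifying the document re-serializes the updated tree.
String(doc)

The same Iterable path form drives the other deep accessors (hasIn, deleteIn, setIn) declared on Collection.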