author     Minteck <contact@minteck.org>  2022-04-09 18:38:29 +0200
committer  Minteck <contact@minteck.org>  2022-04-09 18:38:29 +0200
commit     457c93328f5b51b3fc0aa1600ce8eb8b45a5c2a9 (patch)
tree       a899daf11edb7ce6a99700e462a5740e013f5951 /node_modules
download   snowjail-457c93328f5b51b3fc0aa1600ce8eb8b45a5c2a9.tar.gz
           snowjail-457c93328f5b51b3fc0aa1600ce8eb8b45a5c2a9.tar.bz2
           snowjail-457c93328f5b51b3fc0aa1600ce8eb8b45a5c2a9.zip
Snowjail v0.1
Diffstat (limited to 'node_modules')
l---------  node_modules/.bin/uuid  1
-rw-r--r--  node_modules/.package-lock.json  35
-rw-r--r--  node_modules/chalk/license  9
-rw-r--r--  node_modules/chalk/package.json  78
-rw-r--r--  node_modules/chalk/readme.md  317
-rw-r--r--  node_modules/chalk/source/index.d.ts  318
-rw-r--r--  node_modules/chalk/source/index.js  212
-rw-r--r--  node_modules/chalk/source/utilities.js  33
-rw-r--r--  node_modules/chalk/source/vendor/ansi-styles/index.d.ts  190
-rw-r--r--  node_modules/chalk/source/vendor/ansi-styles/index.js  219
-rw-r--r--  node_modules/chalk/source/vendor/supports-color/browser.d.ts  1
-rw-r--r--  node_modules/chalk/source/vendor/supports-color/browser.js  17
-rw-r--r--  node_modules/chalk/source/vendor/supports-color/index.d.ts  55
-rw-r--r--  node_modules/chalk/source/vendor/supports-color/index.js  169
-rw-r--r--  node_modules/uuid/CHANGELOG.md  229
-rw-r--r--  node_modules/uuid/CONTRIBUTING.md  18
-rw-r--r--  node_modules/uuid/LICENSE.md  9
-rw-r--r--  node_modules/uuid/README.md  505
-rwxr-xr-x  node_modules/uuid/dist/bin/uuid  2
-rw-r--r--  node_modules/uuid/dist/esm-browser/index.js  9
-rw-r--r--  node_modules/uuid/dist/esm-browser/md5.js  215
-rw-r--r--  node_modules/uuid/dist/esm-browser/nil.js  1
-rw-r--r--  node_modules/uuid/dist/esm-browser/parse.js  35
-rw-r--r--  node_modules/uuid/dist/esm-browser/regex.js  1
-rw-r--r--  node_modules/uuid/dist/esm-browser/rng.js  19
-rw-r--r--  node_modules/uuid/dist/esm-browser/sha1.js  96
-rw-r--r--  node_modules/uuid/dist/esm-browser/stringify.js  30
-rw-r--r--  node_modules/uuid/dist/esm-browser/v1.js  95
-rw-r--r--  node_modules/uuid/dist/esm-browser/v3.js  4
-rw-r--r--  node_modules/uuid/dist/esm-browser/v35.js  64
-rw-r--r--  node_modules/uuid/dist/esm-browser/v4.js  24
-rw-r--r--  node_modules/uuid/dist/esm-browser/v5.js  4
-rw-r--r--  node_modules/uuid/dist/esm-browser/validate.js  7
-rw-r--r--  node_modules/uuid/dist/esm-browser/version.js  11
-rw-r--r--  node_modules/uuid/dist/esm-node/index.js  9
-rw-r--r--  node_modules/uuid/dist/esm-node/md5.js  13
-rw-r--r--  node_modules/uuid/dist/esm-node/nil.js  1
-rw-r--r--  node_modules/uuid/dist/esm-node/parse.js  35
-rw-r--r--  node_modules/uuid/dist/esm-node/regex.js  1
-rw-r--r--  node_modules/uuid/dist/esm-node/rng.js  12
-rw-r--r--  node_modules/uuid/dist/esm-node/sha1.js  13
-rw-r--r--  node_modules/uuid/dist/esm-node/stringify.js  29
-rw-r--r--  node_modules/uuid/dist/esm-node/v1.js  95
-rw-r--r--  node_modules/uuid/dist/esm-node/v3.js  4
-rw-r--r--  node_modules/uuid/dist/esm-node/v35.js  64
-rw-r--r--  node_modules/uuid/dist/esm-node/v4.js  24
-rw-r--r--  node_modules/uuid/dist/esm-node/v5.js  4
-rw-r--r--  node_modules/uuid/dist/esm-node/validate.js  7
-rw-r--r--  node_modules/uuid/dist/esm-node/version.js  11
-rw-r--r--  node_modules/uuid/dist/index.js  79
-rw-r--r--  node_modules/uuid/dist/md5-browser.js  223
-rw-r--r--  node_modules/uuid/dist/md5.js  23
-rw-r--r--  node_modules/uuid/dist/nil.js  8
-rw-r--r--  node_modules/uuid/dist/parse.js  45
-rw-r--r--  node_modules/uuid/dist/regex.js  8
-rw-r--r--  node_modules/uuid/dist/rng-browser.js  26
-rw-r--r--  node_modules/uuid/dist/rng.js  24
-rw-r--r--  node_modules/uuid/dist/sha1-browser.js  104
-rw-r--r--  node_modules/uuid/dist/sha1.js  23
-rw-r--r--  node_modules/uuid/dist/stringify.js  39
-rw-r--r--  node_modules/uuid/dist/umd/uuid.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidNIL.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidParse.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidStringify.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidValidate.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidVersion.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidv1.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidv3.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidv4.min.js  1
-rw-r--r--  node_modules/uuid/dist/umd/uuidv5.min.js  1
-rw-r--r--  node_modules/uuid/dist/uuid-bin.js  85
-rw-r--r--  node_modules/uuid/dist/v1.js  107
-rw-r--r--  node_modules/uuid/dist/v3.js  16
-rw-r--r--  node_modules/uuid/dist/v35.js  78
-rw-r--r--  node_modules/uuid/dist/v4.js  37
-rw-r--r--  node_modules/uuid/dist/v5.js  16
-rw-r--r--  node_modules/uuid/dist/validate.js  17
-rw-r--r--  node_modules/uuid/dist/version.js  21
-rw-r--r--  node_modules/uuid/package.json  135
-rw-r--r--  node_modules/uuid/wrapper.mjs  10
-rw-r--r--  node_modules/yaml/LICENSE  13
-rw-r--r--  node_modules/yaml/README.md  143
-rw-r--r--  node_modules/yaml/browser/dist/compose/compose-collection.js  59
-rw-r--r--  node_modules/yaml/browser/dist/compose/compose-doc.js  40
-rw-r--r--  node_modules/yaml/browser/dist/compose/compose-node.js  90
-rw-r--r--  node_modules/yaml/browser/dist/compose/compose-scalar.js  81
-rw-r--r--  node_modules/yaml/browser/dist/compose/composer.js  218
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-block-map.js  107
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-block-scalar.js  194
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-block-seq.js  45
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-end.js  37
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-flow-collection.js  201
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js  224
-rw-r--r--  node_modules/yaml/browser/dist/compose/resolve-props.js  130
-rw-r--r--  node_modules/yaml/browser/dist/compose/util-contains-newline.js  34
-rw-r--r--  node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js  27
-rw-r--r--  node_modules/yaml/browser/dist/compose/util-flow-indent-check.js  15
-rw-r--r--  node_modules/yaml/browser/dist/compose/util-map-includes.js  17
-rw-r--r--  node_modules/yaml/browser/dist/doc/Document.js  332
-rw-r--r--  node_modules/yaml/browser/dist/doc/anchors.js  72
-rw-r--r--  node_modules/yaml/browser/dist/doc/applyReviver.js  54
-rw-r--r--  node_modules/yaml/browser/dist/doc/createNode.js  87
-rw-r--r--  node_modules/yaml/browser/dist/doc/directives.js  169
-rw-r--r--  node_modules/yaml/browser/dist/errors.js  57
-rw-r--r--  node_modules/yaml/browser/dist/index.js  17
-rw-r--r--  node_modules/yaml/browser/dist/log.js  14
-rw-r--r--  node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js  164
-rw-r--r--  node_modules/yaml/browser/dist/nodes/Alias.js  94
-rw-r--r--  node_modules/yaml/browser/dist/nodes/Collection.js  146
-rw-r--r--  node_modules/yaml/browser/dist/nodes/Node.js  48
-rw-r--r--  node_modules/yaml/browser/dist/nodes/Pair.js  36
-rw-r--r--  node_modules/yaml/browser/dist/nodes/Scalar.js  23
-rw-r--r--  node_modules/yaml/browser/dist/nodes/YAMLMap.js  117
-rw-r--r--  node_modules/yaml/browser/dist/nodes/YAMLSeq.js  105
-rw-r--r--  node_modules/yaml/browser/dist/nodes/addPairToJSMap.js  104
-rw-r--r--  node_modules/yaml/browser/dist/nodes/toJS.js  37
-rw-r--r--  node_modules/yaml/browser/dist/parse/cst-scalar.js  215
-rw-r--r--  node_modules/yaml/browser/dist/parse/cst-stringify.js  61
-rw-r--r--  node_modules/yaml/browser/dist/parse/cst-visit.js  97
-rw-r--r--  node_modules/yaml/browser/dist/parse/cst.js  98
-rw-r--r--  node_modules/yaml/browser/dist/parse/lexer.js  702
-rw-r--r--  node_modules/yaml/browser/dist/parse/line-counter.js  39
-rw-r--r--  node_modules/yaml/browser/dist/parse/parser.js  952
-rw-r--r--  node_modules/yaml/browser/dist/public-api.js  100
-rw-r--r--  node_modules/yaml/browser/dist/schema/Schema.js  38
-rw-r--r--  node_modules/yaml/browser/dist/schema/common/map.js  42
-rw-r--r--  node_modules/yaml/browser/dist/schema/common/null.js  15
-rw-r--r--  node_modules/yaml/browser/dist/schema/common/seq.js  33
-rw-r--r--  node_modules/yaml/browser/dist/schema/common/string.js  14
-rw-r--r--  node_modules/yaml/browser/dist/schema/core/bool.js  19
-rw-r--r--  node_modules/yaml/browser/dist/schema/core/float.js  43
-rw-r--r--  node_modules/yaml/browser/dist/schema/core/int.js  38
-rw-r--r--  node_modules/yaml/browser/dist/schema/core/schema.js  23
-rw-r--r--  node_modules/yaml/browser/dist/schema/json/schema.js  62
-rw-r--r--  node_modules/yaml/browser/dist/schema/tags.js  83
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js  66
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js  26
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/float.js  46
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/int.js  71
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js  73
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js  78
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js  37
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/set.js  87
-rw-r--r--  node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js  101
-rw-r--r--  node_modules/yaml/browser/dist/stringify/foldFlowLines.js  135
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringify.js  124
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyCollection.js  152
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyComment.js  20
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyDocument.js  86
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyNumber.js  24
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyPair.js  125
-rw-r--r--  node_modules/yaml/browser/dist/stringify/stringifyString.js  314
-rw-r--r--  node_modules/yaml/browser/dist/util.js  9
-rw-r--r--  node_modules/yaml/browser/dist/visit.js  234
-rw-r--r--  node_modules/yaml/browser/index.js  5
-rw-r--r--  node_modules/yaml/browser/package.json  3
-rw-r--r--  node_modules/yaml/dist/compose/compose-collection.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/compose-collection.js  61
-rw-r--r--  node_modules/yaml/dist/compose/compose-doc.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/compose-doc.js  42
-rw-r--r--  node_modules/yaml/dist/compose/compose-node.d.ts  26
-rw-r--r--  node_modules/yaml/dist/compose/compose-node.js  93
-rw-r--r--  node_modules/yaml/dist/compose/compose-scalar.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/compose-scalar.js  83
-rw-r--r--  node_modules/yaml/dist/compose/composer.d.ts  62
-rw-r--r--  node_modules/yaml/dist/compose/composer.js  222
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-map.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-map.js  109
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-scalar.d.ts  10
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-scalar.js  196
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-seq.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-seq.js  47
-rw-r--r--  node_modules/yaml/dist/compose/resolve-end.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-end.js  39
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-collection.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-collection.js  203
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts  10
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-scalar.js  226
-rw-r--r--  node_modules/yaml/dist/compose/resolve-props.d.ts  21
-rw-r--r--  node_modules/yaml/dist/compose/resolve-props.js  132
-rw-r--r--  node_modules/yaml/dist/compose/util-contains-newline.d.ts  2
-rw-r--r--  node_modules/yaml/dist/compose/util-contains-newline.js  36
-rw-r--r--  node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts  2
-rw-r--r--  node_modules/yaml/dist/compose/util-empty-scalar-position.js  29
-rw-r--r--  node_modules/yaml/dist/compose/util-flow-indent-check.d.ts  3
-rw-r--r--  node_modules/yaml/dist/compose/util-flow-indent-check.js  17
-rw-r--r--  node_modules/yaml/dist/compose/util-map-includes.d.ts  4
-rw-r--r--  node_modules/yaml/dist/compose/util-map-includes.js  19
-rw-r--r--  node_modules/yaml/dist/doc/Document.d.ts  139
-rw-r--r--  node_modules/yaml/dist/doc/Document.js  334
-rw-r--r--  node_modules/yaml/dist/doc/anchors.d.ts  24
-rw-r--r--  node_modules/yaml/dist/doc/anchors.js  77
-rw-r--r--  node_modules/yaml/dist/doc/applyReviver.d.ts  9
-rw-r--r--  node_modules/yaml/dist/doc/applyReviver.js  56
-rw-r--r--  node_modules/yaml/dist/doc/createNode.d.ts  17
-rw-r--r--  node_modules/yaml/dist/doc/createNode.js  89
-rw-r--r--  node_modules/yaml/dist/doc/directives.d.ts  49
-rw-r--r--  node_modules/yaml/dist/doc/directives.js  171
-rw-r--r--  node_modules/yaml/dist/errors.d.ts  21
-rw-r--r--  node_modules/yaml/dist/errors.js  62
-rw-r--r--  node_modules/yaml/dist/index.d.ts  19
-rw-r--r--  node_modules/yaml/dist/index.js  50
-rw-r--r--  node_modules/yaml/dist/log.d.ts  3
-rw-r--r--  node_modules/yaml/dist/log.js  17
-rw-r--r--  node_modules/yaml/dist/node_modules/tslib/tslib.es6.js  76
-rw-r--r--  node_modules/yaml/dist/nodes/Alias.d.ts  28
-rw-r--r--  node_modules/yaml/dist/nodes/Alias.js  96
-rw-r--r--  node_modules/yaml/dist/nodes/Collection.d.ts  73
-rw-r--r--  node_modules/yaml/dist/nodes/Collection.js  150
-rw-r--r--  node_modules/yaml/dist/nodes/Node.d.ts  53
-rw-r--r--  node_modules/yaml/dist/nodes/Node.js  66
-rw-r--r--  node_modules/yaml/dist/nodes/Pair.d.ts  21
-rw-r--r--  node_modules/yaml/dist/nodes/Pair.js  39
-rw-r--r--  node_modules/yaml/dist/nodes/Scalar.d.ts  42
-rw-r--r--  node_modules/yaml/dist/nodes/Scalar.js  26
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLMap.d.ts  43
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLMap.js  120
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLSeq.d.ts  55
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLSeq.js  107
-rw-r--r--  node_modules/yaml/dist/nodes/addPairToJSMap.d.ts  3
-rw-r--r--  node_modules/yaml/dist/nodes/addPairToJSMap.js  106
-rw-r--r--  node_modules/yaml/dist/nodes/toJS.d.ts  30
-rw-r--r--  node_modules/yaml/dist/nodes/toJS.js  39
-rw-r--r--  node_modules/yaml/dist/options.d.ts  330
-rw-r--r--  node_modules/yaml/dist/parse/cst-scalar.d.ts  64
-rw-r--r--  node_modules/yaml/dist/parse/cst-scalar.js  219
-rw-r--r--  node_modules/yaml/dist/parse/cst-stringify.d.ts  8
-rw-r--r--  node_modules/yaml/dist/parse/cst-stringify.js  63
-rw-r--r--  node_modules/yaml/dist/parse/cst-visit.d.ts  39
-rw-r--r--  node_modules/yaml/dist/parse/cst-visit.js  99
-rw-r--r--  node_modules/yaml/dist/parse/cst.d.ts  106
-rw-r--r--  node_modules/yaml/dist/parse/cst.js  112
-rw-r--r--  node_modules/yaml/dist/parse/lexer.d.ts  87
-rw-r--r--  node_modules/yaml/dist/parse/lexer.js  704
-rw-r--r--  node_modules/yaml/dist/parse/line-counter.d.ts  22
-rw-r--r--  node_modules/yaml/dist/parse/line-counter.js  41
-rw-r--r--  node_modules/yaml/dist/parse/parser.d.ts  84
-rw-r--r--  node_modules/yaml/dist/parse/parser.js  956
-rw-r--r--  node_modules/yaml/dist/public-api.d.ts  43
-rw-r--r--  node_modules/yaml/dist/public-api.js  105
-rw-r--r--  node_modules/yaml/dist/schema/Schema.d.ts  18
-rw-r--r--  node_modules/yaml/dist/schema/Schema.js  40
-rw-r--r--  node_modules/yaml/dist/schema/common/map.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/map.js  44
-rw-r--r--  node_modules/yaml/dist/schema/common/null.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/common/null.js  17
-rw-r--r--  node_modules/yaml/dist/schema/common/seq.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/seq.js  35
-rw-r--r--  node_modules/yaml/dist/schema/common/string.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/string.js  16
-rw-r--r--  node_modules/yaml/dist/schema/core/bool.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/bool.js  21
-rw-r--r--  node_modules/yaml/dist/schema/core/float.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/float.js  47
-rw-r--r--  node_modules/yaml/dist/schema/core/int.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/int.js  42
-rw-r--r--  node_modules/yaml/dist/schema/core/schema.d.ts  1
-rw-r--r--  node_modules/yaml/dist/schema/core/schema.js  25
-rw-r--r--  node_modules/yaml/dist/schema/json/schema.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/json/schema.js  64
-rw-r--r--  node_modules/yaml/dist/schema/tags.d.ts  40
-rw-r--r--  node_modules/yaml/dist/schema/tags.js  86
-rw-r--r--  node_modules/yaml/dist/schema/types.d.ts  82
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/binary.js  68
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts  7
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/bool.js  29
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/float.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/float.js  50
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/int.d.ts  5
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/int.js  76
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts  21
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/omap.js  76
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts  10
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/pairs.js  82
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts  1
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/schema.js  39
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/set.d.ts  22
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/set.js  90
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts  6
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/timestamp.js  105
-rw-r--r--  node_modules/yaml/dist/stringify/foldFlowLines.d.ts  34
-rw-r--r--  node_modules/yaml/dist/stringify/foldFlowLines.js  140
-rw-r--r--  node_modules/yaml/dist/stringify/stringify.d.ts  20
-rw-r--r--  node_modules/yaml/dist/stringify/stringify.js  127
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyCollection.d.ts  17
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyCollection.js  154
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyComment.d.ts  10
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyComment.js  24
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyDocument.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyDocument.js  88
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyNumber.d.ts  2
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyNumber.js  26
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyPair.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyPair.js  127
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyString.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyString.js  316
-rw-r--r--  node_modules/yaml/dist/test-events.d.ts  4
-rw-r--r--  node_modules/yaml/dist/test-events.js  135
-rw-r--r--  node_modules/yaml/dist/util.d.ts  9
-rw-r--r--  node_modules/yaml/dist/util.js  24
-rw-r--r--  node_modules/yaml/dist/visit.d.ts  102
-rw-r--r--  node_modules/yaml/dist/visit.js  237
-rw-r--r--  node_modules/yaml/package.json  94
-rw-r--r--  node_modules/yaml/util.d.ts  3
-rw-r--r--  node_modules/yaml/util.js  2
306 files changed, 22453 insertions, 0 deletions
diff --git a/node_modules/.bin/uuid b/node_modules/.bin/uuid
new file mode 120000
index 0000000..588f70e
--- /dev/null
+++ b/node_modules/.bin/uuid
@@ -0,0 +1 @@
+../uuid/dist/bin/uuid
\ No newline at end of file
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json
new file mode 100644
index 0000000..c2a1844
--- /dev/null
+++ b/node_modules/.package-lock.json
@@ -0,0 +1,35 @@
+{
+ "name": "snowjail",
+ "version": "0.1.0",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "node_modules/chalk": {
+ "version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.0.1.tgz",
+ "integrity": "sha512-Fo07WOYGqMfCWHOzSXOt2CxDbC6skS/jO9ynEcmpANMoPrD+W1r1K6Vx7iNm+AQmETU1Xr2t+n8nzkV9t6xh3w==",
+ "engines": {
+ "node": "^12.17.0 || ^14.13 || >=16.0.0"
+ },
+ "funding": {
+ "url": "https://github.com/chalk/chalk?sponsor=1"
+ }
+ },
+ "node_modules/uuid": {
+ "version": "8.3.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+ "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
+ "bin": {
+ "uuid": "dist/bin/uuid"
+ }
+ },
+ "node_modules/yaml": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.0.0.tgz",
+ "integrity": "sha512-JbfdlHKGP2Ik9IHylzWlGd4pPK++EU46/IxMykphS2ZKw7a7h+dHNmcXObLgpRDriBY+rpWslldikckX8oruWQ==",
+ "engines": {
+ "node": ">= 14"
+ }
+ }
+ }
+}
diff --git a/node_modules/chalk/license b/node_modules/chalk/license
new file mode 100644
index 0000000..fa7ceba
--- /dev/null
+++ b/node_modules/chalk/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json
new file mode 100644
index 0000000..7daddb2
--- /dev/null
+++ b/node_modules/chalk/package.json
@@ -0,0 +1,78 @@
+{
+ "name": "chalk",
+ "version": "5.0.1",
+ "description": "Terminal string styling done right",
+ "license": "MIT",
+ "repository": "chalk/chalk",
+ "funding": "https://github.com/chalk/chalk?sponsor=1",
+ "type": "module",
+ "main": "./source/index.js",
+ "exports": "./source/index.js",
+ "imports": {
+ "#ansi-styles": "./source/vendor/ansi-styles/index.js",
+ "#supports-color": {
+ "node": "./source/vendor/supports-color/index.js",
+ "default": "./source/vendor/supports-color/browser.js"
+ }
+ },
+ "types": "./source/index.d.ts",
+ "engines": {
+ "node": "^12.17.0 || ^14.13 || >=16.0.0"
+ },
+ "scripts": {
+ "test": "xo && c8 ava && tsd",
+ "bench": "matcha benchmark.js"
+ },
+ "files": [
+ "source",
+ "!source/index.test-d.ts"
+ ],
+ "keywords": [
+ "color",
+ "colour",
+ "colors",
+ "terminal",
+ "console",
+ "cli",
+ "string",
+ "ansi",
+ "style",
+ "styles",
+ "tty",
+ "formatting",
+ "rgb",
+ "256",
+ "shell",
+ "xterm",
+ "log",
+ "logging",
+ "command-line",
+ "text"
+ ],
+ "devDependencies": {
+ "@types/node": "^16.11.10",
+ "ava": "^3.15.0",
+ "c8": "^7.10.0",
+ "color-convert": "^2.0.1",
+ "execa": "^6.0.0",
+ "log-update": "^5.0.0",
+ "matcha": "^0.7.0",
+ "tsd": "^0.19.0",
+ "xo": "^0.47.0",
+ "yoctodelay": "^2.0.0"
+ },
+ "xo": {
+ "rules": {
+ "unicorn/prefer-string-slice": "off"
+ }
+ },
+ "c8": {
+ "reporter": [
+ "text",
+ "lcov"
+ ],
+ "exclude": [
+ "source/vendor"
+ ]
+ }
+}
diff --git a/node_modules/chalk/readme.md b/node_modules/chalk/readme.md
new file mode 100644
index 0000000..f6adc80
--- /dev/null
+++ b/node_modules/chalk/readme.md
@@ -0,0 +1,317 @@
+<h1 align="center">
+ <br>
+ <br>
+ <img width="320" src="media/logo.svg" alt="Chalk">
+ <br>
+ <br>
+ <br>
+</h1>
+
+> Terminal string styling done right
+
+[![Coverage Status](https://codecov.io/gh/chalk/chalk/branch/main/graph/badge.svg)](https://codecov.io/gh/chalk/chalk)
+[![npm dependents](https://badgen.net/npm/dependents/chalk)](https://www.npmjs.com/package/chalk?activeTab=dependents) [![Downloads](https://badgen.net/npm/dt/chalk)](https://www.npmjs.com/package/chalk)
+[![run on repl.it](https://repl.it/badge/github/chalk/chalk)](https://repl.it/github/chalk/chalk)
+[![Support Chalk on DEV](https://badge.devprotocol.xyz/0x44d871aebF0126Bf646753E2C976Aa7e68A66c15/descriptive)](https://stakes.social/0x44d871aebF0126Bf646753E2C976Aa7e68A66c15)
+
+<img src="https://cdn.jsdelivr.net/gh/chalk/ansi-styles@8261697c95bf34b6c7767e2cbe9941a851d59385/screenshot.svg" width="900">
+
+<br>
+
+---
+
+<div align="center">
+ <p>
+ <p>
+ <sup>
+ Sindre Sorhus' open source work is supported by the community on <a href="https://github.com/sponsors/sindresorhus">GitHub Sponsors</a> and <a href="https://stakes.social/0x44d871aebF0126Bf646753E2C976Aa7e68A66c15">Dev</a>
+ </sup>
+ </p>
+ <sup>Special thanks to:</sup>
+ <br>
+ <br>
+ <a href="https://standardresume.co/tech">
+ <img src="https://sindresorhus.com/assets/thanks/standard-resume-logo.svg" width="160"/>
+ </a>
+ <br>
+ <br>
+ <a href="https://retool.com/?utm_campaign=sindresorhus">
+ <img src="https://sindresorhus.com/assets/thanks/retool-logo.svg" width="230"/>
+ </a>
+ <br>
+ <br>
+ <a href="https://doppler.com/?utm_campaign=github_repo&utm_medium=referral&utm_content=chalk&utm_source=github">
+ <div>
+ <img src="https://dashboard.doppler.com/imgs/logo-long.svg" width="240" alt="Doppler">
+ </div>
+ <b>All your environment variables, in one place</b>
+ <div>
+ <span>Stop struggling with scattered API keys, hacking together home-brewed tools,</span>
+ <br>
+ <span>and avoiding access controls. Keep your team and servers in sync with Doppler.</span>
+ </div>
+ </a>
+ <br>
+ <a href="https://strapi.io/?ref=sindresorhus">
+ <div>
+ <img src="https://sindresorhus.com/assets/thanks/strapi-logo-white-bg.png" width="220" alt="Strapi">
+ </div>
+ <b>Strapi is the leading open-source headless CMS.</b>
+ <div>
+ <sup>It’s 100% JavaScript, fully customizable, and developer-first.</sup>
+ </div>
+ </a>
+ </p>
+</div>
+
+---
+
+<br>
+
+## Highlights
+
+- Expressive API
+- Highly performant
+- No dependencies
+- Ability to nest styles
+- [256/Truecolor color support](#256-and-truecolor-color-support)
+- Auto-detects color support
+- Doesn't extend `String.prototype`
+- Clean and focused
+- Actively maintained
+- [Used by ~76,000 packages](https://www.npmjs.com/browse/depended/chalk) as of October 26, 2021
+
+## Install
+
+```sh
+npm install chalk
+```
+
+**IMPORTANT:** Chalk 5 is ESM. If you want to use Chalk with TypeScript or a build tool, you will probably want to use Chalk 4 for now. [Read more.](https://github.com/chalk/chalk/releases/tag/v5.0.0)
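+
+If you need to load Chalk 5 from a CommonJS file, one option is a dynamic `import()` (a minimal sketch; the `main` wrapper and filename are only illustrative):
+
+```js
+// main.cjs — CommonJS, so a static `require('chalk')` would fail for Chalk 5
+async function main() {
+	// Dynamic import() works in CommonJS and resolves to the module namespace.
+	const {default: chalk} = await import('chalk');
+	console.log(chalk.blue('Hello from CommonJS!'));
+}
+
+main();
+```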
+
+## Usage
+
+```js
+import chalk from 'chalk';
+
+console.log(chalk.blue('Hello world!'));
+```
+
+Chalk comes with an easy to use composable API where you just chain and nest the styles you want.
+
+```js
+import chalk from 'chalk';
+
+const log = console.log;
+
+// Combine styled and normal strings
+log(chalk.blue('Hello') + ' World' + chalk.red('!'));
+
+// Compose multiple styles using the chainable API
+log(chalk.blue.bgRed.bold('Hello world!'));
+
+// Pass in multiple arguments
+log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz'));
+
+// Nest styles
+log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!'));
+
+// Nest styles of the same type even (color, underline, background)
+log(chalk.green(
+ 'I am a green line ' +
+ chalk.blue.underline.bold('with a blue substring') +
+ ' that becomes green again!'
+));
+
+// ES2015 template literal
+log(`
+CPU: ${chalk.red('90%')}
+RAM: ${chalk.green('40%')}
+DISK: ${chalk.yellow('70%')}
+`);
+
+// Use RGB colors in terminal emulators that support it.
+log(chalk.rgb(123, 45, 67).underline('Underlined reddish color'));
+log(chalk.hex('#DEADED').bold('Bold gray!'));
+```
+
+Easily define your own themes:
+
+```js
+import chalk from 'chalk';
+
+const error = chalk.bold.red;
+const warning = chalk.hex('#FFA500'); // Orange color
+
+console.log(error('Error!'));
+console.log(warning('Warning!'));
+```
+
+Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args):
+
+```js
+import chalk from 'chalk';
+
+const name = 'Sindre';
+console.log(chalk.green('Hello %s'), name);
+//=> 'Hello Sindre'
+```
+
+## API
+
+### chalk.`<style>[.<style>...](string, [string...])`
+
+Example: `chalk.red.bold.underline('Hello', 'world');`
+
+Chain [styles](#styles) and call the last one as a method with a string argument. Order doesn't matter, and later styles take precedence in case of a conflict. This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`.
+
+Multiple arguments will be separated by space.
+
+### chalk.level
+
+Specifies the level of color support.
+
+Color support is automatically detected, but you can override it by setting the `level` property. You should however only do this in your own code as it applies globally to all Chalk consumers.
+
+If you need to change this in a reusable module, create a new instance:
+
+```js
+import {Chalk} from 'chalk';
+
+const customChalk = new Chalk({level: 0});
+```
+
+| Level | Description |
+| :---: | :--- |
+| `0` | All colors disabled |
+| `1` | Basic color support (16 colors) |
+| `2` | 256 color support |
+| `3` | Truecolor support (16 million colors) |
+
+### supportsColor
+
+Detect whether the terminal [supports color](https://github.com/chalk/supports-color). Used internally and handled for you, but exposed for convenience.
+
+Can be overridden by the user with the flags `--color` and `--no-color`. For situations where using `--color` is not possible, use the environment variable `FORCE_COLOR=1` (level 1), `FORCE_COLOR=2` (level 2), or `FORCE_COLOR=3` (level 3) to forcefully enable color, or `FORCE_COLOR=0` to forcefully disable. The use of `FORCE_COLOR` overrides all other color support checks.
+
+Explicit 256/Truecolor mode can be enabled using the `--color=256` and `--color=16m` flags, respectively.
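+
+For example, from the command line (assuming `app.js` is your script; the filename is only illustrative):
+
+```sh
+# Force Truecolor output even when stdout is not a TTY (e.g. when piping)
+FORCE_COLOR=3 node app.js | less -R
+
+# Disable color entirely
+node app.js --no-color
+```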
+
+### chalkStderr and supportsColorStderr
+
+`chalkStderr` contains a separate instance configured with color support detected for the `stderr` stream instead of `stdout`. The override rules from `supportsColor` apply to it too. `supportsColorStderr` is exposed for convenience.
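+
+For example, to style error output independently of regular output (a small sketch using the named exports defined in `source/index.js`):
+
+```js
+import chalk, {chalkStderr} from 'chalk';
+
+// stdout and stderr color support are detected separately, which matters
+// when only one of the two streams is redirected to a file or pipe.
+console.log(chalk.green('status: ok'));
+console.error(chalkStderr.red('error: something went wrong'));
+```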
+
+## Styles
+
+### Modifiers
+
+- `reset` - Reset the current style.
+- `bold` - Make the text bold.
+- `dim` - Make the text have lower opacity.
+- `italic` - Make the text italic. *(Not widely supported)*
+- `underline` - Put a horizontal line below the text. *(Not widely supported)*
+- `overline` - Put a horizontal line above the text. *(Not widely supported)*
+- `inverse` - Invert background and foreground colors.
+- `hidden` - Print the text but make it invisible.
+- `strikethrough` - Puts a horizontal line through the center of the text. *(Not widely supported)*
+- `visible` - Print the text only when Chalk has a color level above zero. Can be useful for things that are purely cosmetic.
+
+### Colors
+
+- `black`
+- `red`
+- `green`
+- `yellow`
+- `blue`
+- `magenta`
+- `cyan`
+- `white`
+- `blackBright` (alias: `gray`, `grey`)
+- `redBright`
+- `greenBright`
+- `yellowBright`
+- `blueBright`
+- `magentaBright`
+- `cyanBright`
+- `whiteBright`
+
+### Background colors
+
+- `bgBlack`
+- `bgRed`
+- `bgGreen`
+- `bgYellow`
+- `bgBlue`
+- `bgMagenta`
+- `bgCyan`
+- `bgWhite`
+- `bgBlackBright` (alias: `bgGray`, `bgGrey`)
+- `bgRedBright`
+- `bgGreenBright`
+- `bgYellowBright`
+- `bgBlueBright`
+- `bgMagentaBright`
+- `bgCyanBright`
+- `bgWhiteBright`
+
+## 256 and Truecolor color support
+
+Chalk supports 256 colors and [Truecolor](https://gist.github.com/XVilka/8346728) (16 million colors) on supported terminal apps.
+
+Colors are downsampled from 16 million RGB values to an ANSI color format that is supported by the terminal emulator (or by specifying `{level: n}` as a Chalk option). For example, Chalk configured to run at level 1 (basic color support) will downsample an RGB value of #FF0000 (red) to 31 (ANSI escape for red).
+
+Examples:
+
+- `chalk.hex('#DEADED').underline('Hello, world!')`
+- `chalk.rgb(15, 100, 204).inverse('Hello!')`
+
+Background versions of these models are prefixed with `bg` and the first level of the module capitalized (e.g. `hex` for foreground colors and `bgHex` for background colors).
+
+- `chalk.bgHex('#DEADED').underline('Hello, world!')`
+- `chalk.bgRgb(15, 100, 204).inverse('Hello!')`
+
+The following color models can be used:
+
+- [`rgb`](https://en.wikipedia.org/wiki/RGB_color_model) - Example: `chalk.rgb(255, 136, 0).bold('Orange!')`
+- [`hex`](https://en.wikipedia.org/wiki/Web_colors#Hex_triplet) - Example: `chalk.hex('#FF8800').bold('Orange!')`
+- [`ansi256`](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) - Example: `chalk.bgAnsi256(194)('Honeydew, more or less')`
+
+## Browser support
+
+Since Chrome 69, ANSI escape codes are natively supported in the developer console.
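+
+A quick way to verify this (a minimal sketch; it assumes a bundler resolves Chalk's browser entry for `#supports-color`, which reports level 1 for Blink-based browsers):
+
+```js
+import chalk, {supportsColor} from 'chalk';
+
+// In a Chromium DevTools console this logs a basic (level 1) color report
+// and the second message renders in red.
+console.log(supportsColor);
+console.log(chalk.red('Rendered with ANSI escape codes'));
+```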
+
+## Windows
+
+If you're on Windows, do yourself a favor and use [Windows Terminal](https://github.com/microsoft/terminal) instead of `cmd.exe`.
+
+## Origin story
+
+[colors.js](https://github.com/Marak/colors.js) used to be the most popular string styling module, but it has serious deficiencies like extending `String.prototype` which causes all kinds of [problems](https://github.com/yeoman/yo/issues/68) and the package is unmaintained. Although there are other packages, they either do too much or not enough. Chalk is a clean and focused alternative.
+
+## chalk for enterprise
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of chalk and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-chalk?utm_source=npm-chalk&utm_medium=referral&utm_campaign=enterprise&utm_term=repo)
+
+## Related
+
+- [chalk-template](https://github.com/chalk/chalk-template) - [Tagged template literals](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Template_literals#tagged_templates) support for this module
+- [chalk-cli](https://github.com/chalk/chalk-cli) - CLI for this module
+- [ansi-styles](https://github.com/chalk/ansi-styles) - ANSI escape codes for styling strings in the terminal
+- [supports-color](https://github.com/chalk/supports-color) - Detect whether a terminal supports color
+- [strip-ansi](https://github.com/chalk/strip-ansi) - Strip ANSI escape codes
+- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Strip ANSI escape codes from a stream
+- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes
+- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes
+- [wrap-ansi](https://github.com/chalk/wrap-ansi) - Wordwrap a string with ANSI escape codes
+- [slice-ansi](https://github.com/chalk/slice-ansi) - Slice a string with ANSI escape codes
+- [color-convert](https://github.com/qix-/color-convert) - Converts colors between different models
+- [chalk-animation](https://github.com/bokub/chalk-animation) - Animate strings in the terminal
+- [gradient-string](https://github.com/bokub/gradient-string) - Apply color gradients to strings
+- [chalk-pipe](https://github.com/LitoMore/chalk-pipe) - Create chalk style schemes with simpler style strings
+- [terminal-link](https://github.com/sindresorhus/terminal-link) - Create clickable links in the terminal
+
+## Maintainers
+
+- [Sindre Sorhus](https://github.com/sindresorhus)
+- [Josh Junon](https://github.com/qix-)
diff --git a/node_modules/chalk/source/index.d.ts b/node_modules/chalk/source/index.d.ts
new file mode 100644
index 0000000..b2408c6
--- /dev/null
+++ b/node_modules/chalk/source/index.d.ts
@@ -0,0 +1,318 @@
+// TODO: Make it this when TS supports that.
+// import {ColorInfo, ColorSupportLevel} from '#supports-color';
+import {ColorInfo, ColorSupportLevel} from './vendor/supports-color/index.js';
+
+/**
+Basic foreground colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
+*/
+export type ForegroundColor =
+ | 'black'
+ | 'red'
+ | 'green'
+ | 'yellow'
+ | 'blue'
+ | 'magenta'
+ | 'cyan'
+ | 'white'
+ | 'gray'
+ | 'grey'
+ | 'blackBright'
+ | 'redBright'
+ | 'greenBright'
+ | 'yellowBright'
+ | 'blueBright'
+ | 'magentaBright'
+ | 'cyanBright'
+ | 'whiteBright';
+
+/**
+Basic background colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
+*/
+export type BackgroundColor =
+ | 'bgBlack'
+ | 'bgRed'
+ | 'bgGreen'
+ | 'bgYellow'
+ | 'bgBlue'
+ | 'bgMagenta'
+ | 'bgCyan'
+ | 'bgWhite'
+ | 'bgGray'
+ | 'bgGrey'
+ | 'bgBlackBright'
+ | 'bgRedBright'
+ | 'bgGreenBright'
+ | 'bgYellowBright'
+ | 'bgBlueBright'
+ | 'bgMagentaBright'
+ | 'bgCyanBright'
+ | 'bgWhiteBright';
+
+/**
+Basic colors.
+
+[More colors here.](https://github.com/chalk/chalk/blob/main/readme.md#256-and-truecolor-color-support)
+*/
+export type Color = ForegroundColor | BackgroundColor;
+
+export type Modifiers =
+ | 'reset'
+ | 'bold'
+ | 'dim'
+ | 'italic'
+ | 'underline'
+ | 'overline'
+ | 'inverse'
+ | 'hidden'
+ | 'strikethrough'
+ | 'visible';
+
+export interface Options {
+ /**
+ Specify the color support for Chalk.
+
+ By default, color support is automatically detected based on the environment.
+
+ Levels:
+ - `0` - All colors disabled.
+ - `1` - Basic 16 colors support.
+ - `2` - ANSI 256 colors support.
+ - `3` - Truecolor 16 million colors support.
+ */
+ readonly level?: ColorSupportLevel;
+}
+
+/**
+Return a new Chalk instance.
+*/
+export const Chalk: new (options?: Options) => ChalkInstance; // eslint-disable-line @typescript-eslint/naming-convention
+
+export interface ChalkInstance {
+ (...text: unknown[]): string;
+
+ /**
+ The color support for Chalk.
+
+ By default, color support is automatically detected based on the environment.
+
+ Levels:
+ - `0` - All colors disabled.
+ - `1` - Basic 16 colors support.
+ - `2` - ANSI 256 colors support.
+ - `3` - Truecolor 16 million colors support.
+ */
+ level: ColorSupportLevel;
+
+ /**
+ Use RGB values to set text color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.rgb(222, 173, 237);
+ ```
+ */
+ rgb: (red: number, green: number, blue: number) => this;
+
+ /**
+ Use HEX value to set text color.
+
+ @param color - Hexadecimal value representing the desired color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.hex('#DEADED');
+ ```
+ */
+ hex: (color: string) => this;
+
+ /**
+ Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.ansi256(201);
+ ```
+ */
+ ansi256: (index: number) => this;
+
+ /**
+ Use RGB values to set background color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.bgRgb(222, 173, 237);
+ ```
+ */
+ bgRgb: (red: number, green: number, blue: number) => this;
+
+ /**
+ Use HEX value to set background color.
+
+ @param color - Hexadecimal value representing the desired color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.bgHex('#DEADED');
+ ```
+ */
+ bgHex: (color: string) => this;
+
+ /**
+	Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color.
+
+ @example
+ ```
+ import chalk from 'chalk';
+
+ chalk.bgAnsi256(201);
+ ```
+ */
+ bgAnsi256: (index: number) => this;
+
+ /**
+ Modifier: Reset the current style.
+ */
+ readonly reset: this;
+
+ /**
+ Modifier: Make the text bold.
+ */
+ readonly bold: this;
+
+ /**
+ Modifier: Make the text have lower opacity.
+ */
+ readonly dim: this;
+
+ /**
+ Modifier: Make the text italic. *(Not widely supported)*
+ */
+ readonly italic: this;
+
+ /**
+ Modifier: Put a horizontal line below the text. *(Not widely supported)*
+ */
+ readonly underline: this;
+
+ /**
+ Modifier: Put a horizontal line above the text. *(Not widely supported)*
+ */
+ readonly overline: this;
+
+ /**
+ Modifier: Invert background and foreground colors.
+ */
+ readonly inverse: this;
+
+ /**
+ Modifier: Print the text but make it invisible.
+ */
+ readonly hidden: this;
+
+ /**
+ Modifier: Puts a horizontal line through the center of the text. *(Not widely supported)*
+ */
+ readonly strikethrough: this;
+
+ /**
+ Modifier: Print the text only when Chalk has a color level above zero.
+
+ Can be useful for things that are purely cosmetic.
+ */
+ readonly visible: this;
+
+ readonly black: this;
+ readonly red: this;
+ readonly green: this;
+ readonly yellow: this;
+ readonly blue: this;
+ readonly magenta: this;
+ readonly cyan: this;
+ readonly white: this;
+
+ /*
+ Alias for `blackBright`.
+ */
+ readonly gray: this;
+
+ /*
+ Alias for `blackBright`.
+ */
+ readonly grey: this;
+
+ readonly blackBright: this;
+ readonly redBright: this;
+ readonly greenBright: this;
+ readonly yellowBright: this;
+ readonly blueBright: this;
+ readonly magentaBright: this;
+ readonly cyanBright: this;
+ readonly whiteBright: this;
+
+ readonly bgBlack: this;
+ readonly bgRed: this;
+ readonly bgGreen: this;
+ readonly bgYellow: this;
+ readonly bgBlue: this;
+ readonly bgMagenta: this;
+ readonly bgCyan: this;
+ readonly bgWhite: this;
+
+ /*
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGray: this;
+
+ /*
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGrey: this;
+
+ readonly bgBlackBright: this;
+ readonly bgRedBright: this;
+ readonly bgGreenBright: this;
+ readonly bgYellowBright: this;
+ readonly bgBlueBright: this;
+ readonly bgMagentaBright: this;
+ readonly bgCyanBright: this;
+ readonly bgWhiteBright: this;
+}
+
+/**
+Main Chalk object that allows you to chain styles together.
+
+Call the last one as a method with a string argument.
+
+Order doesn't matter, and later styles take precedence in case of a conflict.
+
+This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`.
+*/
+declare const chalk: ChalkInstance;
+
+export const supportsColor: ColorInfo;
+
+export const chalkStderr: typeof chalk;
+export const supportsColorStderr: typeof supportsColor;
+
+export {
+ ColorInfo,
+ ColorSupport,
+ ColorSupportLevel,
+// } from '#supports-color';
+} from './vendor/supports-color/index.js';
+
+export default chalk;
diff --git a/node_modules/chalk/source/index.js b/node_modules/chalk/source/index.js
new file mode 100644
index 0000000..302024b
--- /dev/null
+++ b/node_modules/chalk/source/index.js
@@ -0,0 +1,212 @@
+import ansiStyles from '#ansi-styles';
+import supportsColor from '#supports-color';
+import { // eslint-disable-line import/order
+ stringReplaceAll,
+ stringEncaseCRLFWithFirstIndex,
+} from './utilities.js';
+
+const {stdout: stdoutColor, stderr: stderrColor} = supportsColor;
+
+const GENERATOR = Symbol('GENERATOR');
+const STYLER = Symbol('STYLER');
+const IS_EMPTY = Symbol('IS_EMPTY');
+
+// `supportsColor.level` → `ansiStyles.color[name]` mapping
+const levelMapping = [
+ 'ansi',
+ 'ansi',
+ 'ansi256',
+ 'ansi16m',
+];
+
+const styles = Object.create(null);
+
+const applyOptions = (object, options = {}) => {
+ if (options.level && !(Number.isInteger(options.level) && options.level >= 0 && options.level <= 3)) {
+ throw new Error('The `level` option should be an integer from 0 to 3');
+ }
+
+ // Detect level if not set manually
+ const colorLevel = stdoutColor ? stdoutColor.level : 0;
+ object.level = options.level === undefined ? colorLevel : options.level;
+};
+
+export class Chalk {
+ constructor(options) {
+ // eslint-disable-next-line no-constructor-return
+ return chalkFactory(options);
+ }
+}
+
+const chalkFactory = options => {
+ const chalk = (...strings) => strings.join(' ');
+ applyOptions(chalk, options);
+
+ Object.setPrototypeOf(chalk, createChalk.prototype);
+
+ return chalk;
+};
+
+function createChalk(options) {
+ return chalkFactory(options);
+}
+
+Object.setPrototypeOf(createChalk.prototype, Function.prototype);
+
+for (const [styleName, style] of Object.entries(ansiStyles)) {
+ styles[styleName] = {
+ get() {
+ const builder = createBuilder(this, createStyler(style.open, style.close, this[STYLER]), this[IS_EMPTY]);
+ Object.defineProperty(this, styleName, {value: builder});
+ return builder;
+ },
+ };
+}
+
+styles.visible = {
+ get() {
+ const builder = createBuilder(this, this[STYLER], true);
+ Object.defineProperty(this, 'visible', {value: builder});
+ return builder;
+ },
+};
+
+const getModelAnsi = (model, level, type, ...arguments_) => {
+ if (model === 'rgb') {
+ if (level === 'ansi16m') {
+ return ansiStyles[type].ansi16m(...arguments_);
+ }
+
+ if (level === 'ansi256') {
+ return ansiStyles[type].ansi256(ansiStyles.rgbToAnsi256(...arguments_));
+ }
+
+ return ansiStyles[type].ansi(ansiStyles.rgbToAnsi(...arguments_));
+ }
+
+ if (model === 'hex') {
+ return getModelAnsi('rgb', level, type, ...ansiStyles.hexToRgb(...arguments_));
+ }
+
+ return ansiStyles[type][model](...arguments_);
+};
+
+const usedModels = ['rgb', 'hex', 'ansi256'];
+
+for (const model of usedModels) {
+ styles[model] = {
+ get() {
+ const {level} = this;
+ return function (...arguments_) {
+ const styler = createStyler(getModelAnsi(model, levelMapping[level], 'color', ...arguments_), ansiStyles.color.close, this[STYLER]);
+ return createBuilder(this, styler, this[IS_EMPTY]);
+ };
+ },
+ };
+
+ const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1);
+ styles[bgModel] = {
+ get() {
+ const {level} = this;
+ return function (...arguments_) {
+ const styler = createStyler(getModelAnsi(model, levelMapping[level], 'bgColor', ...arguments_), ansiStyles.bgColor.close, this[STYLER]);
+ return createBuilder(this, styler, this[IS_EMPTY]);
+ };
+ },
+ };
+}
+
+const proto = Object.defineProperties(() => {}, {
+ ...styles,
+ level: {
+ enumerable: true,
+ get() {
+ return this[GENERATOR].level;
+ },
+ set(level) {
+ this[GENERATOR].level = level;
+ },
+ },
+});
+
+const createStyler = (open, close, parent) => {
+ let openAll;
+ let closeAll;
+ if (parent === undefined) {
+ openAll = open;
+ closeAll = close;
+ } else {
+ openAll = parent.openAll + open;
+ closeAll = close + parent.closeAll;
+ }
+
+ return {
+ open,
+ close,
+ openAll,
+ closeAll,
+ parent,
+ };
+};
+
+const createBuilder = (self, _styler, _isEmpty) => {
+ // Single argument is hot path, implicit coercion is faster than anything
+ // eslint-disable-next-line no-implicit-coercion
+ const builder = (...arguments_) => applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' '));
+
+ // We alter the prototype because we must return a function, but there is
+ // no way to create a function with a different prototype
+ Object.setPrototypeOf(builder, proto);
+
+ builder[GENERATOR] = self;
+ builder[STYLER] = _styler;
+ builder[IS_EMPTY] = _isEmpty;
+
+ return builder;
+};
+
+const applyStyle = (self, string) => {
+ if (self.level <= 0 || !string) {
+ return self[IS_EMPTY] ? '' : string;
+ }
+
+ let styler = self[STYLER];
+
+ if (styler === undefined) {
+ return string;
+ }
+
+ const {openAll, closeAll} = styler;
+ if (string.includes('\u001B')) {
+ while (styler !== undefined) {
+ // Replace any instances already present with a re-opening code
+ // otherwise only the part of the string until said closing code
+ // will be colored, and the rest will simply be 'plain'.
+ string = stringReplaceAll(string, styler.close, styler.open);
+
+ styler = styler.parent;
+ }
+ }
+
+ // We can move both next actions out of loop, because remaining actions in loop won't have
+ // any/visible effect on parts we add here. Close the styling before a linebreak and reopen
+ // after next line to fix a bleed issue on macOS: https://github.com/chalk/chalk/pull/92
+ const lfIndex = string.indexOf('\n');
+ if (lfIndex !== -1) {
+ string = stringEncaseCRLFWithFirstIndex(string, closeAll, openAll, lfIndex);
+ }
+
+ return openAll + string + closeAll;
+};
+
+Object.defineProperties(createChalk.prototype, styles);
+
+const chalk = createChalk();
+export const chalkStderr = createChalk({level: stderrColor ? stderrColor.level : 0});
+
+export {
+ stdoutColor as supportsColor,
+ stderrColor as supportsColorStderr,
+};
+
+export default chalk;
diff --git a/node_modules/chalk/source/utilities.js b/node_modules/chalk/source/utilities.js
new file mode 100644
index 0000000..5d49dad
--- /dev/null
+++ b/node_modules/chalk/source/utilities.js
@@ -0,0 +1,33 @@
+// TODO: When targeting Node.js 16, use `String.prototype.replaceAll`.
+export function stringReplaceAll(string, substring, replacer) {
+ let index = string.indexOf(substring);
+ if (index === -1) {
+ return string;
+ }
+
+ const substringLength = substring.length;
+ let endIndex = 0;
+ let returnValue = '';
+ do {
+ returnValue += string.substr(endIndex, index - endIndex) + substring + replacer;
+ endIndex = index + substringLength;
+ index = string.indexOf(substring, endIndex);
+ } while (index !== -1);
+
+ returnValue += string.slice(endIndex);
+ return returnValue;
+}
+
+export function stringEncaseCRLFWithFirstIndex(string, prefix, postfix, index) {
+ let endIndex = 0;
+ let returnValue = '';
+ do {
+ const gotCR = string[index - 1] === '\r';
+ returnValue += string.substr(endIndex, (gotCR ? index - 1 : index) - endIndex) + prefix + (gotCR ? '\r\n' : '\n') + postfix;
+ endIndex = index + 1;
+ index = string.indexOf('\n', endIndex);
+ } while (index !== -1);
+
+ returnValue += string.slice(endIndex);
+ return returnValue;
+}
diff --git a/node_modules/chalk/source/vendor/ansi-styles/index.d.ts b/node_modules/chalk/source/vendor/ansi-styles/index.d.ts
new file mode 100644
index 0000000..7518d2a
--- /dev/null
+++ b/node_modules/chalk/source/vendor/ansi-styles/index.d.ts
@@ -0,0 +1,190 @@
+export interface CSPair { // eslint-disable-line @typescript-eslint/naming-convention
+ /**
+ The ANSI terminal control sequence for starting this style.
+ */
+ readonly open: string;
+
+ /**
+ The ANSI terminal control sequence for ending this style.
+ */
+ readonly close: string;
+}
+
+export interface ColorBase {
+ /**
+ The ANSI terminal control sequence for ending this color.
+ */
+ readonly close: string;
+
+ ansi(code: number): string;
+
+ ansi256(code: number): string;
+
+ ansi16m(red: number, green: number, blue: number): string;
+}
+
+export interface Modifier {
+ /**
+ Resets the current color chain.
+ */
+ readonly reset: CSPair;
+
+ /**
+ Make text bold.
+ */
+ readonly bold: CSPair;
+
+ /**
+ Emitting only a small amount of light.
+ */
+ readonly dim: CSPair;
+
+ /**
+ Make text italic. (Not widely supported)
+ */
+ readonly italic: CSPair;
+
+ /**
+ Make text underline. (Not widely supported)
+ */
+ readonly underline: CSPair;
+
+ /**
+ Make text overline.
+
+ Supported on VTE-based terminals, the GNOME terminal, mintty, and Git Bash.
+ */
+ readonly overline: CSPair;
+
+ /**
+ Inverse background and foreground colors.
+ */
+ readonly inverse: CSPair;
+
+ /**
+ Prints the text, but makes it invisible.
+ */
+ readonly hidden: CSPair;
+
+ /**
+ Puts a horizontal line through the center of the text. (Not widely supported)
+ */
+ readonly strikethrough: CSPair;
+}
+
+export interface ForegroundColor {
+ readonly black: CSPair;
+ readonly red: CSPair;
+ readonly green: CSPair;
+ readonly yellow: CSPair;
+ readonly blue: CSPair;
+ readonly cyan: CSPair;
+ readonly magenta: CSPair;
+ readonly white: CSPair;
+
+ /**
+ Alias for `blackBright`.
+ */
+ readonly gray: CSPair;
+
+ /**
+ Alias for `blackBright`.
+ */
+ readonly grey: CSPair;
+
+ readonly blackBright: CSPair;
+ readonly redBright: CSPair;
+ readonly greenBright: CSPair;
+ readonly yellowBright: CSPair;
+ readonly blueBright: CSPair;
+ readonly cyanBright: CSPair;
+ readonly magentaBright: CSPair;
+ readonly whiteBright: CSPair;
+}
+
+export interface BackgroundColor {
+ readonly bgBlack: CSPair;
+ readonly bgRed: CSPair;
+ readonly bgGreen: CSPair;
+ readonly bgYellow: CSPair;
+ readonly bgBlue: CSPair;
+ readonly bgCyan: CSPair;
+ readonly bgMagenta: CSPair;
+ readonly bgWhite: CSPair;
+
+ /**
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGray: CSPair;
+
+ /**
+ Alias for `bgBlackBright`.
+ */
+ readonly bgGrey: CSPair;
+
+ readonly bgBlackBright: CSPair;
+ readonly bgRedBright: CSPair;
+ readonly bgGreenBright: CSPair;
+ readonly bgYellowBright: CSPair;
+ readonly bgBlueBright: CSPair;
+ readonly bgCyanBright: CSPair;
+ readonly bgMagentaBright: CSPair;
+ readonly bgWhiteBright: CSPair;
+}
+
+export interface ConvertColor {
+ /**
+ Convert from the RGB color space to the ANSI 256 color space.
+
+ @param red - (`0...255`)
+ @param green - (`0...255`)
+ @param blue - (`0...255`)
+ */
+ rgbToAnsi256(red: number, green: number, blue: number): number;
+
+ /**
+ Convert from the RGB HEX color space to the RGB color space.
+
+ @param hex - A hexadecimal string containing RGB data.
+ */
+ hexToRgb(hex: string): [red: number, green: number, blue: number];
+
+ /**
+ Convert from the RGB HEX color space to the ANSI 256 color space.
+
+ @param hex - A hexadecimal string containing RGB data.
+ */
+ hexToAnsi256(hex: string): number;
+
+ /**
+ Convert from the ANSI 256 color space to the ANSI 16 color space.
+
+ @param code - A number representing the ANSI 256 color.
+ */
+ ansi256ToAnsi(code: number): number;
+
+ /**
+ Convert from the RGB color space to the ANSI 16 color space.
+
+ @param red - (`0...255`)
+ @param green - (`0...255`)
+ @param blue - (`0...255`)
+ */
+ rgbToAnsi(red: number, green: number, blue: number): number;
+
+ /**
+ Convert from the RGB HEX color space to the ANSI 16 color space.
+
+ @param hex - A hexadecimal string containing RGB data.
+ */
+ hexToAnsi(hex: string): number;
+}
+
+declare const ansiStyles: {
+ readonly modifier: Modifier;
+ readonly color: ColorBase & ForegroundColor;
+ readonly bgColor: ColorBase & BackgroundColor;
+ readonly codes: ReadonlyMap<number, number>;
+} & ForegroundColor & BackgroundColor & Modifier & ConvertColor;
+
+export default ansiStyles;
diff --git a/node_modules/chalk/source/vendor/ansi-styles/index.js b/node_modules/chalk/source/vendor/ansi-styles/index.js
new file mode 100644
index 0000000..5746537
--- /dev/null
+++ b/node_modules/chalk/source/vendor/ansi-styles/index.js
@@ -0,0 +1,219 @@
+const ANSI_BACKGROUND_OFFSET = 10;
+
+const wrapAnsi16 = (offset = 0) => code => `\u001B[${code + offset}m`;
+
+const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`;
+
+const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`;
+
+function assembleStyles() {
+ const codes = new Map();
+ const styles = {
+ modifier: {
+ reset: [0, 0],
+ // 21 isn't widely supported and 22 does the same thing
+ bold: [1, 22],
+ dim: [2, 22],
+ italic: [3, 23],
+ underline: [4, 24],
+ overline: [53, 55],
+ inverse: [7, 27],
+ hidden: [8, 28],
+ strikethrough: [9, 29],
+ },
+ color: {
+ black: [30, 39],
+ red: [31, 39],
+ green: [32, 39],
+ yellow: [33, 39],
+ blue: [34, 39],
+ magenta: [35, 39],
+ cyan: [36, 39],
+ white: [37, 39],
+
+ // Bright color
+ blackBright: [90, 39],
+ redBright: [91, 39],
+ greenBright: [92, 39],
+ yellowBright: [93, 39],
+ blueBright: [94, 39],
+ magentaBright: [95, 39],
+ cyanBright: [96, 39],
+ whiteBright: [97, 39],
+ },
+ bgColor: {
+ bgBlack: [40, 49],
+ bgRed: [41, 49],
+ bgGreen: [42, 49],
+ bgYellow: [43, 49],
+ bgBlue: [44, 49],
+ bgMagenta: [45, 49],
+ bgCyan: [46, 49],
+ bgWhite: [47, 49],
+
+ // Bright color
+ bgBlackBright: [100, 49],
+ bgRedBright: [101, 49],
+ bgGreenBright: [102, 49],
+ bgYellowBright: [103, 49],
+ bgBlueBright: [104, 49],
+ bgMagentaBright: [105, 49],
+ bgCyanBright: [106, 49],
+ bgWhiteBright: [107, 49],
+ },
+ };
+
+ // Alias bright black as gray (and grey)
+ styles.color.gray = styles.color.blackBright;
+ styles.bgColor.bgGray = styles.bgColor.bgBlackBright;
+ styles.color.grey = styles.color.blackBright;
+ styles.bgColor.bgGrey = styles.bgColor.bgBlackBright;
+
+ for (const [groupName, group] of Object.entries(styles)) {
+ for (const [styleName, style] of Object.entries(group)) {
+ styles[styleName] = {
+ open: `\u001B[${style[0]}m`,
+ close: `\u001B[${style[1]}m`,
+ };
+
+ group[styleName] = styles[styleName];
+
+ codes.set(style[0], style[1]);
+ }
+
+ Object.defineProperty(styles, groupName, {
+ value: group,
+ enumerable: false,
+ });
+ }
+
+ Object.defineProperty(styles, 'codes', {
+ value: codes,
+ enumerable: false,
+ });
+
+ styles.color.close = '\u001B[39m';
+ styles.bgColor.close = '\u001B[49m';
+
+ styles.color.ansi = wrapAnsi16();
+ styles.color.ansi256 = wrapAnsi256();
+ styles.color.ansi16m = wrapAnsi16m();
+ styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);
+ styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
+ styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
+
+ // From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
+ Object.defineProperties(styles, {
+ rgbToAnsi256: {
+ value: (red, green, blue) => {
+ // We use the extended greyscale palette here, with the exception of
+ // black and white. normal palette only has 4 greyscale shades.
+ if (red === green && green === blue) {
+ if (red < 8) {
+ return 16;
+ }
+
+ if (red > 248) {
+ return 231;
+ }
+
+ return Math.round(((red - 8) / 247) * 24) + 232;
+ }
+
+ return 16
+ + (36 * Math.round(red / 255 * 5))
+ + (6 * Math.round(green / 255 * 5))
+ + Math.round(blue / 255 * 5);
+ },
+ enumerable: false,
+ },
+ hexToRgb: {
+ value: hex => {
+ const matches = /(?<colorString>[a-f\d]{6}|[a-f\d]{3})/i.exec(hex.toString(16));
+ if (!matches) {
+ return [0, 0, 0];
+ }
+
+ let {colorString} = matches.groups;
+
+ if (colorString.length === 3) {
+ colorString = [...colorString].map(character => character + character).join('');
+ }
+
+ const integer = Number.parseInt(colorString, 16);
+
+ return [
+ /* eslint-disable no-bitwise */
+ (integer >> 16) & 0xFF,
+ (integer >> 8) & 0xFF,
+ integer & 0xFF,
+ /* eslint-enable no-bitwise */
+ ];
+ },
+ enumerable: false,
+ },
+ hexToAnsi256: {
+ value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
+ enumerable: false,
+ },
+ ansi256ToAnsi: {
+ value: code => {
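+ // Illustrative mappings: standard codes pass through (1 -> 31, 9 -> 91),
+ // while color-cube entries collapse to the nearest basic/bright color (196 -> 91).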
+ if (code < 8) {
+ return 30 + code;
+ }
+
+ if (code < 16) {
+ return 90 + (code - 8);
+ }
+
+ let red;
+ let green;
+ let blue;
+
+ if (code >= 232) {
+ red = (((code - 232) * 10) + 8) / 255;
+ green = red;
+ blue = red;
+ } else {
+ code -= 16;
+
+ const remainder = code % 36;
+
+ red = Math.floor(code / 36) / 5;
+ green = Math.floor(remainder / 6) / 5;
+ blue = (remainder % 6) / 5;
+ }
+
+ const value = Math.max(red, green, blue) * 2;
+
+ if (value === 0) {
+ return 30;
+ }
+
+ // eslint-disable-next-line no-bitwise
+ let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));
+
+ if (value === 2) {
+ result += 60;
+ }
+
+ return result;
+ },
+ enumerable: false,
+ },
+ rgbToAnsi: {
+ value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),
+ enumerable: false,
+ },
+ hexToAnsi: {
+ value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),
+ enumerable: false,
+ },
+ });
+
+ return styles;
+}
+
+const ansiStyles = assembleStyles();
+
+export default ansiStyles;
diff --git a/node_modules/chalk/source/vendor/supports-color/browser.d.ts b/node_modules/chalk/source/vendor/supports-color/browser.d.ts
new file mode 100644
index 0000000..0e8a9cc
--- /dev/null
+++ b/node_modules/chalk/source/vendor/supports-color/browser.d.ts
@@ -0,0 +1 @@
+export {default} from './index.js';
diff --git a/node_modules/chalk/source/vendor/supports-color/browser.js b/node_modules/chalk/source/vendor/supports-color/browser.js
new file mode 100644
index 0000000..f4e9acd
--- /dev/null
+++ b/node_modules/chalk/source/vendor/supports-color/browser.js
@@ -0,0 +1,17 @@
+/* eslint-env browser */
+
+const isBlinkBasedBrowser = /\b(Chrome|Chromium)\//.test(navigator.userAgent);
+
+const colorSupport = isBlinkBasedBrowser ? {
+ level: 1,
+ hasBasic: true,
+ has256: false,
+ has16m: false,
+} : false;
+
+const supportsColor = {
+ stdout: colorSupport,
+ stderr: colorSupport,
+};
+
+export default supportsColor;
diff --git a/node_modules/chalk/source/vendor/supports-color/index.d.ts b/node_modules/chalk/source/vendor/supports-color/index.d.ts
new file mode 100644
index 0000000..98e3618
--- /dev/null
+++ b/node_modules/chalk/source/vendor/supports-color/index.d.ts
@@ -0,0 +1,55 @@
+import {WriteStream} from 'node:tty';
+
+export interface Options {
+ /**
+ Whether `process.argv` should be sniffed for `--color` and `--no-color` flags.
+
+ @default true
+ */
+ readonly sniffFlags?: boolean;
+}
+
+/**
+Levels:
+- `0` - All colors disabled.
+- `1` - Basic 16 colors support.
+- `2` - ANSI 256 colors support.
+- `3` - Truecolor 16 million colors support.
+*/
+export type ColorSupportLevel = 0 | 1 | 2 | 3;
+
+/**
+Detect whether the terminal supports color.
+*/
+export interface ColorSupport {
+ /**
+ The color level.
+ */
+ level: ColorSupportLevel;
+
+ /**
+ Whether basic 16 colors are supported.
+ */
+ hasBasic: boolean;
+
+ /**
+ Whether ANSI 256 colors are supported.
+ */
+ has256: boolean;
+
+ /**
+ Whether Truecolor 16 million colors are supported.
+ */
+ has16m: boolean;
+}
+
+export type ColorInfo = ColorSupport | false;
+
+export function createSupportsColor(stream: WriteStream, options?: Options): ColorInfo;
+
+declare const supportsColor: {
+ stdout: ColorInfo;
+ stderr: ColorInfo;
+};
+
+export default supportsColor;
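+
+// Illustrative usage sketch (assumes a Node.js writable stream such as `process.stdout`):
+//
+//   import {createSupportsColor} from './index.js';
+//   const stdoutColor = createSupportsColor(process.stdout, {sniffFlags: false});
+//   if (stdoutColor && stdoutColor.has256) {
+//     // safe to emit 256-color escape sequences
+//   }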
diff --git a/node_modules/chalk/source/vendor/supports-color/index.js b/node_modules/chalk/source/vendor/supports-color/index.js
new file mode 100644
index 0000000..55f813c
--- /dev/null
+++ b/node_modules/chalk/source/vendor/supports-color/index.js
@@ -0,0 +1,169 @@
+import process from 'node:process';
+import os from 'node:os';
+import tty from 'node:tty';
+
+// From: https://github.com/sindresorhus/has-flag/blob/main/index.js
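+// Returns true when `flag` (with its `-`/`--` prefix added as needed) appears in
+// argv before a bare `--` terminator, e.g. hasFlag('color=256') matches `--color=256`.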
+function hasFlag(flag, argv = process.argv) {
+ const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+ const position = argv.indexOf(prefix + flag);
+ const terminatorPosition = argv.indexOf('--');
+ return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+}
+
+const {env} = process;
+
+let flagForceColor;
+if (
+ hasFlag('no-color')
+ || hasFlag('no-colors')
+ || hasFlag('color=false')
+ || hasFlag('color=never')
+) {
+ flagForceColor = 0;
+} else if (
+ hasFlag('color')
+ || hasFlag('colors')
+ || hasFlag('color=true')
+ || hasFlag('color=always')
+) {
+ flagForceColor = 1;
+}
+
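+// FORCE_COLOR=true or an empty value forces level 1, FORCE_COLOR=false forces level 0,
+// and numeric values are parsed and clamped to a maximum level of 3.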
+function envForceColor() {
+ if ('FORCE_COLOR' in env) {
+ if (env.FORCE_COLOR === 'true') {
+ return 1;
+ }
+
+ if (env.FORCE_COLOR === 'false') {
+ return 0;
+ }
+
+ return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3);
+ }
+}
+
+function translateLevel(level) {
+ if (level === 0) {
+ return false;
+ }
+
+ return {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3,
+ };
+}
+
+function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
+ const noFlagForceColor = envForceColor();
+ if (noFlagForceColor !== undefined) {
+ flagForceColor = noFlagForceColor;
+ }
+
+ const forceColor = sniffFlags ? flagForceColor : noFlagForceColor;
+
+ if (forceColor === 0) {
+ return 0;
+ }
+
+ if (sniffFlags) {
+ if (hasFlag('color=16m')
+ || hasFlag('color=full')
+ || hasFlag('color=truecolor')) {
+ return 3;
+ }
+
+ if (hasFlag('color=256')) {
+ return 2;
+ }
+ }
+
+ if (haveStream && !streamIsTTY && forceColor === undefined) {
+ return 0;
+ }
+
+ const min = forceColor || 0;
+
+ if (env.TERM === 'dumb') {
+ return min;
+ }
+
+ if (process.platform === 'win32') {
+ // Windows 10 build 10586 is the first Windows release that supports 256 colors.
+ // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+ const osRelease = os.release().split('.');
+ if (
+ Number(osRelease[0]) >= 10
+ && Number(osRelease[2]) >= 10_586
+ ) {
+ return Number(osRelease[2]) >= 14_931 ? 3 : 2;
+ }
+
+ return 1;
+ }
+
+ if ('CI' in env) {
+ if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ return 1;
+ }
+
+ return min;
+ }
+
+ if ('TEAMCITY_VERSION' in env) {
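+ // TeamCity supports ANSI colors since version 9.1; the regex matches 9.x (x >= 1)
+ // and any major version with two or more digits.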
+ return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+ }
+
+ // Check for Azure DevOps pipelines
+ if ('TF_BUILD' in env && 'AGENT_NAME' in env) {
+ return 1;
+ }
+
+ if (env.COLORTERM === 'truecolor') {
+ return 3;
+ }
+
+ if ('TERM_PROGRAM' in env) {
+ const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+ switch (env.TERM_PROGRAM) {
+ case 'iTerm.app':
+ return version >= 3 ? 3 : 2;
+ case 'Apple_Terminal':
+ return 2;
+ // No default
+ }
+ }
+
+ if (/-256(color)?$/i.test(env.TERM)) {
+ return 2;
+ }
+
+ if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+ return 1;
+ }
+
+ if ('COLORTERM' in env) {
+ return 1;
+ }
+
+ return min;
+}
+
+export function createSupportsColor(stream, options = {}) {
+ const level = _supportsColor(stream, {
+ streamIsTTY: stream && stream.isTTY,
+ ...options,
+ });
+
+ return translateLevel(level);
+}
+
+const supportsColor = {
+ stdout: createSupportsColor({isTTY: tty.isatty(1)}),
+ stderr: createSupportsColor({isTTY: tty.isatty(2)}),
+};
+
+export default supportsColor;
diff --git a/node_modules/uuid/CHANGELOG.md b/node_modules/uuid/CHANGELOG.md
new file mode 100644
index 0000000..7519d19
--- /dev/null
+++ b/node_modules/uuid/CHANGELOG.md
@@ -0,0 +1,229 @@
+# Changelog
+
+All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+
+### [8.3.2](https://github.com/uuidjs/uuid/compare/v8.3.1...v8.3.2) (2020-12-08)
+
+### Bug Fixes
+
+- lazy load getRandomValues ([#537](https://github.com/uuidjs/uuid/issues/537)) ([16c8f6d](https://github.com/uuidjs/uuid/commit/16c8f6df2f6b09b4d6235602d6a591188320a82e)), closes [#536](https://github.com/uuidjs/uuid/issues/536)
+
+### [8.3.1](https://github.com/uuidjs/uuid/compare/v8.3.0...v8.3.1) (2020-10-04)
+
+### Bug Fixes
+
+- support expo>=39.0.0 ([#515](https://github.com/uuidjs/uuid/issues/515)) ([c65a0f3](https://github.com/uuidjs/uuid/commit/c65a0f3fa73b901959d638d1e3591dfacdbed867)), closes [#375](https://github.com/uuidjs/uuid/issues/375)
+
+## [8.3.0](https://github.com/uuidjs/uuid/compare/v8.2.0...v8.3.0) (2020-07-27)
+
+### Features
+
+- add parse/stringify/validate/version/NIL APIs ([#479](https://github.com/uuidjs/uuid/issues/479)) ([0e6c10b](https://github.com/uuidjs/uuid/commit/0e6c10ba1bf9517796ff23c052fc0468eedfd5f4)), closes [#475](https://github.com/uuidjs/uuid/issues/475) [#478](https://github.com/uuidjs/uuid/issues/478) [#480](https://github.com/uuidjs/uuid/issues/480) [#481](https://github.com/uuidjs/uuid/issues/481) [#180](https://github.com/uuidjs/uuid/issues/180)
+
+## [8.2.0](https://github.com/uuidjs/uuid/compare/v8.1.0...v8.2.0) (2020-06-23)
+
+### Features
+
+- improve performance of v1 string representation ([#453](https://github.com/uuidjs/uuid/issues/453)) ([0ee0b67](https://github.com/uuidjs/uuid/commit/0ee0b67c37846529c66089880414d29f3ae132d5))
+- remove deprecated v4 string parameter ([#454](https://github.com/uuidjs/uuid/issues/454)) ([88ce3ca](https://github.com/uuidjs/uuid/commit/88ce3ca0ba046f60856de62c7ce03f7ba98ba46c)), closes [#437](https://github.com/uuidjs/uuid/issues/437)
+- support jspm ([#473](https://github.com/uuidjs/uuid/issues/473)) ([e9f2587](https://github.com/uuidjs/uuid/commit/e9f2587a92575cac31bc1d4ae944e17c09756659))
+
+### Bug Fixes
+
+- prepare package exports for webpack 5 ([#468](https://github.com/uuidjs/uuid/issues/468)) ([8d6e6a5](https://github.com/uuidjs/uuid/commit/8d6e6a5f8965ca9575eb4d92e99a43435f4a58a8))
+
+## [8.1.0](https://github.com/uuidjs/uuid/compare/v8.0.0...v8.1.0) (2020-05-20)
+
+### Features
+
+- improve v4 performance by reusing random number array ([#435](https://github.com/uuidjs/uuid/issues/435)) ([bf4af0d](https://github.com/uuidjs/uuid/commit/bf4af0d711b4d2ed03d1f74fd12ad0baa87dc79d))
+- optimize V8 performance of bytesToUuid ([#434](https://github.com/uuidjs/uuid/issues/434)) ([e156415](https://github.com/uuidjs/uuid/commit/e156415448ec1af2351fa0b6660cfb22581971f2))
+
+### Bug Fixes
+
+- export package.json required by react-native and bundlers ([#449](https://github.com/uuidjs/uuid/issues/449)) ([be1c8fe](https://github.com/uuidjs/uuid/commit/be1c8fe9a3206c358e0059b52fafd7213aa48a52)), closes [ai/nanoevents#44](https://github.com/ai/nanoevents/issues/44#issuecomment-602010343) [#444](https://github.com/uuidjs/uuid/issues/444)
+
+## [8.0.0](https://github.com/uuidjs/uuid/compare/v7.0.3...v8.0.0) (2020-04-29)
+
+### ⚠ BREAKING CHANGES
+
+- For native ECMAScript Module (ESM) usage in Node.js, only named exports are exposed; there is no longer a default export.
+
+ ```diff
+ -import uuid from 'uuid';
+ -console.log(uuid.v4()); // -> 'cd6c3b08-0adc-4f4b-a6ef-36087a1c9869'
+ +import { v4 as uuidv4 } from 'uuid';
+ +uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
+ ```
+
+- Deep requiring specific algorithms of this library like `require('uuid/v4')`, which has been deprecated in `uuid@7`, is no longer supported.
+
+ Instead use the named exports that this module exports.
+
+ For ECMAScript Modules (ESM):
+
+ ```diff
+ -import uuidv4 from 'uuid/v4';
+ +import { v4 as uuidv4 } from 'uuid';
+ uuidv4();
+ ```
+
+ For CommonJS:
+
+ ```diff
+ -const uuidv4 = require('uuid/v4');
+ +const { v4: uuidv4 } = require('uuid');
+ uuidv4();
+ ```
+
+### Features
+
+- native Node.js ES Modules (wrapper approach) ([#423](https://github.com/uuidjs/uuid/issues/423)) ([2d9f590](https://github.com/uuidjs/uuid/commit/2d9f590ad9701d692625c07ed62f0a0f91227991)), closes [#245](https://github.com/uuidjs/uuid/issues/245) [#419](https://github.com/uuidjs/uuid/issues/419) [#342](https://github.com/uuidjs/uuid/issues/342)
+- remove deep requires ([#426](https://github.com/uuidjs/uuid/issues/426)) ([daf72b8](https://github.com/uuidjs/uuid/commit/daf72b84ceb20272a81bb5fbddb05dd95922cbba))
+
+### Bug Fixes
+
+- add CommonJS syntax example to README quickstart section ([#417](https://github.com/uuidjs/uuid/issues/417)) ([e0ec840](https://github.com/uuidjs/uuid/commit/e0ec8402c7ad44b7ef0453036c612f5db513fda0))
+
+### [7.0.3](https://github.com/uuidjs/uuid/compare/v7.0.2...v7.0.3) (2020-03-31)
+
+### Bug Fixes
+
+- make deep require deprecation warning work in browsers ([#409](https://github.com/uuidjs/uuid/issues/409)) ([4b71107](https://github.com/uuidjs/uuid/commit/4b71107d8c0d2ef56861ede6403fc9dc35a1e6bf)), closes [#408](https://github.com/uuidjs/uuid/issues/408)
+
+### [7.0.2](https://github.com/uuidjs/uuid/compare/v7.0.1...v7.0.2) (2020-03-04)
+
+### Bug Fixes
+
+- make access to msCrypto consistent ([#393](https://github.com/uuidjs/uuid/issues/393)) ([8bf2a20](https://github.com/uuidjs/uuid/commit/8bf2a20f3565df743da7215eebdbada9d2df118c))
+- simplify link in deprecation warning ([#391](https://github.com/uuidjs/uuid/issues/391)) ([bb2c8e4](https://github.com/uuidjs/uuid/commit/bb2c8e4e9f4c5f9c1eaaf3ea59710c633cd90cb7))
+- update links to match content in readme ([#386](https://github.com/uuidjs/uuid/issues/386)) ([44f2f86](https://github.com/uuidjs/uuid/commit/44f2f86e9d2bbf14ee5f0f00f72a3db1292666d4))
+
+### [7.0.1](https://github.com/uuidjs/uuid/compare/v7.0.0...v7.0.1) (2020-02-25)
+
+### Bug Fixes
+
+- clean up esm builds for node and browser ([#383](https://github.com/uuidjs/uuid/issues/383)) ([59e6a49](https://github.com/uuidjs/uuid/commit/59e6a49e7ce7b3e8fb0f3ee52b9daae72af467dc))
+- provide browser versions independent from module system ([#380](https://github.com/uuidjs/uuid/issues/380)) ([4344a22](https://github.com/uuidjs/uuid/commit/4344a22e7aed33be8627eeaaf05360f256a21753)), closes [#378](https://github.com/uuidjs/uuid/issues/378)
+
+## [7.0.0](https://github.com/uuidjs/uuid/compare/v3.4.0...v7.0.0) (2020-02-24)
+
+### ⚠ BREAKING CHANGES
+
+- The default export, which used to be the v4() method but which was already discouraged in v3.x of this library, has been removed.
+- Explicitly note that deep imports of the different uuid version functions are deprecated and no longer encouraged and that ECMAScript module named imports should be used instead. Emit a deprecation warning for people who deep-require the different algorithm variants.
+- Remove builtin support for insecure random number generators in the browser. Users who want that will have to supply their own random number generator function.
+- Remove support for generating v3 and v5 UUIDs in Node.js<4.x
+- Convert code base to ECMAScript Modules (ESM) and release CommonJS build for node and ESM build for browser bundlers.
+
+### Features
+
+- add UMD build to npm package ([#357](https://github.com/uuidjs/uuid/issues/357)) ([4e75adf](https://github.com/uuidjs/uuid/commit/4e75adf435196f28e3fbbe0185d654b5ded7ca2c)), closes [#345](https://github.com/uuidjs/uuid/issues/345)
+- add various es module and CommonJS examples ([b238510](https://github.com/uuidjs/uuid/commit/b238510bf352463521f74bab175a3af9b7a42555))
+- ensure that docs are up-to-date in CI ([ee5e77d](https://github.com/uuidjs/uuid/commit/ee5e77db547474f5a8f23d6c857a6d399209986b))
+- hybrid CommonJS & ECMAScript modules build ([a3f078f](https://github.com/uuidjs/uuid/commit/a3f078faa0baff69ab41aed08e041f8f9c8993d0))
+- remove insecure fallback random number generator ([3a5842b](https://github.com/uuidjs/uuid/commit/3a5842b141a6e5de0ae338f391661e6b84b167c9)), closes [#173](https://github.com/uuidjs/uuid/issues/173)
+- remove support for pre Node.js v4 Buffer API ([#356](https://github.com/uuidjs/uuid/issues/356)) ([b59b5c5](https://github.com/uuidjs/uuid/commit/b59b5c5ecad271c5453f1a156f011671f6d35627))
+- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([c37a518](https://github.com/uuidjs/uuid/commit/c37a518e367ac4b6d0aa62dba1bc6ce9e85020f7)), closes [#338](https://github.com/uuidjs/uuid/issues/338)
+
+### Bug Fixes
+
+- add deep-require proxies for local testing and adjust tests ([#365](https://github.com/uuidjs/uuid/issues/365)) ([7fedc79](https://github.com/uuidjs/uuid/commit/7fedc79ac8fda4bfd1c566c7f05ef4ac13b2db48))
+- add note about removal of default export ([#372](https://github.com/uuidjs/uuid/issues/372)) ([12749b7](https://github.com/uuidjs/uuid/commit/12749b700eb49db8a9759fd306d8be05dbfbd58c)), closes [#370](https://github.com/uuidjs/uuid/issues/370)
+- deprecated deep requiring of the different algorithm versions ([#361](https://github.com/uuidjs/uuid/issues/361)) ([c0bdf15](https://github.com/uuidjs/uuid/commit/c0bdf15e417639b1aeb0b247b2fb11f7a0a26b23))
+
+## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16)
+
+### Features
+
+- rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338)
+
+## [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19)
+
+### Bug Fixes
+
+- no longer run ci tests on node v4
+- upgrade dependencies
+
+## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28)
+
+### Bug Fixes
+
+- typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877))
+
+## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28)
+
+### Bug Fixes
+
+- fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2))
+
+# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22)
+
+### Bug Fixes
+
+- assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc))
+- fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4))
+- Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331))
+- mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c))
+
+### Features
+
+- enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182))
+
+## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16)
+
+### Bug Fixes
+
+- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b))
+
+# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16)
+
+### Bug Fixes
+
+- remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824))
+- use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b))
+
+### Features
+
+- Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726))
+
+# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17)
+
+### Bug Fixes
+
+- (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183)
+- Fix typo (#178)
+- Simple typo fix (#165)
+
+### Features
+
+- v5 support in CLI (#197)
+- V5 support (#188)
+
+# 3.0.1 (2016-11-28)
+
+- split uuid versions into separate files
+
+# 3.0.0 (2016-11-17)
+
+- remove .parse and .unparse
+
+# 2.0.0
+
+- Removed uuid.BufferClass
+
+# 1.4.0
+
+- Improved module context detection
+- Removed public RNG functions
+
+# 1.3.2
+
+- Improve tests and handling of v1() options (Issue #24)
+- Expose RNG option to allow for perf testing with different generators
+
+# 1.3.0
+
+- Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)!
+- Support for node.js crypto API
+- De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code
diff --git a/node_modules/uuid/CONTRIBUTING.md b/node_modules/uuid/CONTRIBUTING.md
new file mode 100644
index 0000000..4a4503d
--- /dev/null
+++ b/node_modules/uuid/CONTRIBUTING.md
@@ -0,0 +1,18 @@
+# Contributing
+
+Please feel free to file GitHub Issues or propose Pull Requests. We're always happy to discuss improvements to this library!
+
+## Testing
+
+```shell
+npm test
+```
+
+## Releasing
+
+Releases are supposed to be done from master; version bumping is automated through [`standard-version`](https://github.com/conventional-changelog/standard-version):
+
+```shell
+npm run release -- --dry-run # verify output manually
+npm run release # follow the instructions from the output of this command
+```
diff --git a/node_modules/uuid/LICENSE.md b/node_modules/uuid/LICENSE.md
new file mode 100644
index 0000000..3934168
--- /dev/null
+++ b/node_modules/uuid/LICENSE.md
@@ -0,0 +1,9 @@
+The MIT License (MIT)
+
+Copyright (c) 2010-2020 Robert Kieffer and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/uuid/README.md b/node_modules/uuid/README.md
new file mode 100644
index 0000000..ed27e57
--- /dev/null
+++ b/node_modules/uuid/README.md
@@ -0,0 +1,505 @@
+<!--
+ -- This file is auto-generated from README_js.md. Changes should be made there.
+ -->
+
+# uuid [![CI](https://github.com/uuidjs/uuid/workflows/CI/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ACI) [![Browser](https://github.com/uuidjs/uuid/workflows/Browser/badge.svg)](https://github.com/uuidjs/uuid/actions?query=workflow%3ABrowser)
+
+For the creation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDs
+
+- **Complete** - Support for RFC4122 version 1, 3, 4, and 5 UUIDs
+- **Cross-platform** - Support for ...
+ - CommonJS, [ECMAScript Modules](#ecmascript-modules) and [CDN builds](#cdn-builds)
+ - Node 8, 10, 12, 14
+ - Chrome, Safari, Firefox, Edge, IE 11 browsers
+ - Webpack and rollup.js module bundlers
+ - [React Native / Expo](#react-native--expo)
+- **Secure** - Cryptographically-strong random values
+- **Small** - Zero-dependency, small footprint, plays nice with "tree shaking" packagers
+- **CLI** - Includes the [`uuid` command line](#command-line) utility
+
+**Upgrading from `uuid@3.x`?** Your code is probably okay, but check out [Upgrading From `uuid@3.x`](#upgrading-from-uuid3x) for details.
+
+## Quickstart
+
+To create a random UUID...
+
+**1. Install**
+
+```shell
+npm install uuid
+```
+
+**2. Create a UUID** (ES6 module syntax)
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+uuidv4(); // ⇨ '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
+```
+
+... or using CommonJS syntax:
+
+```javascript
+const { v4: uuidv4 } = require('uuid');
+uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'
+```
+
+For timestamp UUIDs, namespace UUIDs, and other options read on ...
+
+## API Summary
+
+| | | |
+| --- | --- | --- |
+| [`uuid.NIL`](#uuidnil) | The nil UUID string (all zeros) | New in `uuid@8.3` |
+| [`uuid.parse()`](#uuidparsestr) | Convert UUID string to array of bytes | New in `uuid@8.3` |
+| [`uuid.stringify()`](#uuidstringifyarr-offset) | Convert array of bytes to UUID string | New in `uuid@8.3` |
+| [`uuid.v1()`](#uuidv1options-buffer-offset) | Create a version 1 (timestamp) UUID | |
+| [`uuid.v3()`](#uuidv3name-namespace-buffer-offset) | Create a version 3 (namespace w/ MD5) UUID | |
+| [`uuid.v4()`](#uuidv4options-buffer-offset) | Create a version 4 (random) UUID | |
+| [`uuid.v5()`](#uuidv5name-namespace-buffer-offset) | Create a version 5 (namespace w/ SHA-1) UUID | |
+| [`uuid.validate()`](#uuidvalidatestr) | Test a string to see if it is a valid UUID | New in `uuid@8.3` |
+| [`uuid.version()`](#uuidversionstr) | Detect RFC version of a UUID | New in `uuid@8.3` |
+
+## API
+
+### uuid.NIL
+
+The nil UUID string (all zeros).
+
+Example:
+
+```javascript
+import { NIL as NIL_UUID } from 'uuid';
+
+NIL_UUID; // ⇨ '00000000-0000-0000-0000-000000000000'
+```
+
+### uuid.parse(str)
+
+Convert UUID string to array of bytes
+
+| | |
+| --------- | ---------------------------------------- |
+| `str` | A valid UUID `String` |
+| _returns_ | `Uint8Array[16]` |
+| _throws_ | `TypeError` if `str` is not a valid UUID |
+
+Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left &Rarr; right order of hex-pairs in UUID strings, as shown in the example below.
+
+Example:
+
+```javascript
+import { parse as uuidParse } from 'uuid';
+
+// Parse a UUID
+const bytes = uuidParse('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b');
+
+// Convert to hex strings to show byte order (for documentation purposes)
+[...bytes].map((v) => v.toString(16).padStart(2, '0')); // ⇨
+ // [
+ // '6e', 'c0', 'bd', '7f',
+ // '11', 'c0', '43', 'da',
+ // '97', '5e', '2a', '8a',
+ // 'd9', 'eb', 'ae', '0b'
+ // ]
+```
+
+### uuid.stringify(arr[, offset])
+
+Convert array of bytes to UUID string
+
+| | |
+| -------------- | ---------------------------------------------------------------------------- |
+| `arr` | `Array`-like collection of 16 values (starting from `offset`) between 0-255. |
+| [`offset` = 0] | `Number` Starting index in the Array |
+| _returns_ | `String` |
+| _throws_ | `TypeError` if a valid UUID string cannot be generated |
+
+Note: Ordering of values in the byte arrays used by `parse()` and `stringify()` follows the left &Rarr; right order of hex-pairs in UUID strings, as shown in the example below.
+
+Example:
+
+```javascript
+import { stringify as uuidStringify } from 'uuid';
+
+const uuidBytes = [
+ 0x6e,
+ 0xc0,
+ 0xbd,
+ 0x7f,
+ 0x11,
+ 0xc0,
+ 0x43,
+ 0xda,
+ 0x97,
+ 0x5e,
+ 0x2a,
+ 0x8a,
+ 0xd9,
+ 0xeb,
+ 0xae,
+ 0x0b,
+];
+
+uuidStringify(uuidBytes); // ⇨ '6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'
+```
+
+### uuid.v1([options[, buffer[, offset]]])
+
+Create an RFC version 1 (timestamp) UUID
+
+| | |
+| --- | --- |
+| [`options`] | `Object` with one or more of the following properties: |
+| [`options.node` ] | RFC "node" field as an `Array[6]` of byte values (per 4.1.6) |
+| [`options.clockseq`] | RFC "clock sequence" as a `Number` between 0 - 0x3fff |
+| [`options.msecs`] | RFC "timestamp" field (`Number` of milliseconds, unix epoch) |
+| [`options.nsecs`] | RFC "timestamp" field (`Number` of nanoseconds to add to `msecs`, should be 0-10,000) |
+| [`options.random`] | `Array` of 16 random bytes (0-255) |
+| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) |
+| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` |
+| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` |
+| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` |
+| _throws_ | `Error` if more than 10M UUIDs/sec are requested |
+
+Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process.
+
+Note: `options.random` and `options.rng` are only meaningful on the very first call to `v1()`, where they may be passed to initialize the internal `node` and `clockseq` fields.
+
+Example:
+
+```javascript
+import { v1 as uuidv1 } from 'uuid';
+
+uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d'
+```
+
+Example using `options`:
+
+```javascript
+import { v1 as uuidv1 } from 'uuid';
+
+const v1options = {
+ node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab],
+ clockseq: 0x1234,
+ msecs: new Date('2011-11-01').getTime(),
+ nsecs: 5678,
+};
+uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab'
+```
+
+### uuid.v3(name, namespace[, buffer[, offset]])
+
+Create an RFC version 3 (namespace w/ MD5) UUID
+
+API is identical to `v5()`, but uses "v3" instead.
+
+&#x26a0;&#xfe0f; Note: Per the RFC, "_If backward compatibility is not an issue, SHA-1 [Version 5] is preferred_."
+
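+Example (a minimal illustrative sketch mirroring the `v5()` examples further below; the namespace shown is an arbitrary placeholder value):
+
+```javascript
+import { v3 as uuidv3 } from 'uuid';
+
+const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
+
+uuidv3('Hello, World!', MY_NAMESPACE); // ⇨ a version 3 UUID, stable for this name + namespace
+```
+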
+### uuid.v4([options[, buffer[, offset]]])
+
+Create an RFC version 4 (random) UUID
+
+| | |
+| --- | --- |
+| [`options`] | `Object` with one or more of the following properties: |
+| [`options.random`] | `Array` of 16 random bytes (0-255) |
+| [`options.rng`] | Alternative to `options.random`, a `Function` that returns an `Array` of 16 random bytes (0-255) |
+| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` |
+| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` |
+| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` |
+
+Example:
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+
+uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'
+```
+
+Example using predefined `random` values:
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+
+const v4options = {
+ random: [
+ 0x10,
+ 0x91,
+ 0x56,
+ 0xbe,
+ 0xc4,
+ 0xfb,
+ 0xc1,
+ 0xea,
+ 0x71,
+ 0xb4,
+ 0xef,
+ 0xe1,
+ 0x67,
+ 0x1c,
+ 0x58,
+ 0x36,
+ ],
+};
+uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836'
+```
+
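+Example writing into a pre-allocated buffer (a sketch based on the `buffer`/`offset` parameters documented in the table above):
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+
+const buffer = new Uint8Array(16);
+uuidv4(null, buffer); // ⇨ returns `buffer`, filled with the 16 UUID bytes
+```
+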
+### uuid.v5(name, namespace[, buffer[, offset]])
+
+Create an RFC version 5 (namespace w/ SHA-1) UUID
+
+| | |
+| --- | --- |
+| `name` | `String \| Array` |
+| `namespace` | `String \| Array[16]` Namespace UUID |
+| [`buffer`] | `Array \| Buffer` If specified, uuid will be written here in byte-form, starting at `offset` |
+| [`offset` = 0] | `Number` Index to start writing UUID bytes in `buffer` |
+| _returns_ | UUID `String` if no `buffer` is specified, otherwise returns `buffer` |
+
+Note: The RFC `DNS` and `URL` namespaces are available as `v5.DNS` and `v5.URL`.
+
+Example with custom namespace:
+
+```javascript
+import { v5 as uuidv5 } from 'uuid';
+
+// Define a custom namespace. Readers, create your own using something like
+// https://www.uuidgenerator.net/
+const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
+
+uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681'
+```
+
+Example with RFC `URL` namespace:
+
+```javascript
+import { v5 as uuidv5 } from 'uuid';
+
+uuidv5('https://www.w3.org/', uuidv5.URL); // ⇨ 'c106a26a-21bb-5538-8bf2-57095d1976c1'
+```
+
+### uuid.validate(str)
+
+Test a string to see if it is a valid UUID
+
+| | |
+| --------- | --------------------------------------------------- |
+| `str` | `String` to validate |
+| _returns_ | `true` if string is a valid UUID, `false` otherwise |
+
+Example:
+
+```javascript
+import { validate as uuidValidate } from 'uuid';
+
+uuidValidate('not a UUID'); // ⇨ false
+uuidValidate('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ true
+```
+
+Using `validate` and `version` together it is possible to do per-version validation, e.g. validate only v4 UUIDs.
+
+```javascript
+import { version as uuidVersion } from 'uuid';
+import { validate as uuidValidate } from 'uuid';
+
+function uuidValidateV4(uuid) {
+ return uuidValidate(uuid) && uuidVersion(uuid) === 4;
+}
+
+const v1Uuid = 'd9428888-122b-11e1-b85c-61cd3cbb3210';
+const v4Uuid = '109156be-c4fb-41ea-b1b4-efe1671c5836';
+
+uuidValidateV4(v4Uuid); // ⇨ true
+uuidValidateV4(v1Uuid); // ⇨ false
+```
+
+### uuid.version(str)
+
+Detect RFC version of a UUID
+
+| | |
+| --------- | ---------------------------------------- |
+| `str` | A valid UUID `String` |
+| _returns_ | `Number` The RFC version of the UUID |
+| _throws_ | `TypeError` if `str` is not a valid UUID |
+
+Example:
+
+```javascript
+import { version as uuidVersion } from 'uuid';
+
+uuidVersion('45637ec4-c85f-11ea-87d0-0242ac130003'); // ⇨ 1
+uuidVersion('6ec0bd7f-11c0-43da-975e-2a8ad9ebae0b'); // ⇨ 4
+```
+
+## Command Line
+
+UUIDs can be generated from the command line using `uuid`.
+
+```shell
+$ uuid
+ddeb27fb-d9a0-4624-be4d-4615062daed4
+```
+
+The default is to generate version 4 UUIDs; however, the other versions are supported as well. Type `uuid --help` for details:
+
+```shell
+$ uuid --help
+
+Usage:
+ uuid
+ uuid v1
+ uuid v3 <name> <namespace uuid>
+ uuid v4
+ uuid v5 <name> <namespace uuid>
+ uuid --help
+
+Note: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs
+defined by RFC4122
+```
+
+## ECMAScript Modules
+
+This library comes with [ECMAScript Modules](https://www.ecma-international.org/ecma-262/6.0/#sec-modules) (ESM) support for Node.js versions that support it ([example](./examples/node-esmodules/)) as well as bundlers like [rollup.js](https://rollupjs.org/guide/en/#tree-shaking) ([example](./examples/browser-rollup/)) and [webpack](https://webpack.js.org/guides/tree-shaking/) ([example](./examples/browser-webpack/)) (targeting both Node.js and browser environments).
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'
+```
+
+To run the examples you must first create a dist build of this library in the module root:
+
+```shell
+npm run build
+```
+
+## CDN Builds
+
+### ECMAScript Modules
+
+To load this module directly into modern browsers that [support loading ECMAScript Modules](https://caniuse.com/#feat=es6-module) you can make use of [jspm](https://jspm.org/):
+
+```html
+<script type="module">
+ import { v4 as uuidv4 } from 'https://jspm.dev/uuid';
+ console.log(uuidv4()); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed'
+</script>
+```
+
+### UMD
+
+To load this module directly into older browsers you can use the [UMD (Universal Module Definition)](https://github.com/umdjs/umd) builds from any of the following CDNs:
+
+**Using [UNPKG](https://unpkg.com/uuid@latest/dist/umd/)**:
+
+```html
+<script src="https://unpkg.com/uuid@latest/dist/umd/uuidv4.min.js"></script>
+```
+
+**Using [jsDelivr](https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/)**:
+
+```html
+<script src="https://cdn.jsdelivr.net/npm/uuid@latest/dist/umd/uuidv4.min.js"></script>
+```
+
+**Using [cdnjs](https://cdnjs.com/libraries/uuid)**:
+
+```html
+<script src="https://cdnjs.cloudflare.com/ajax/libs/uuid/8.1.0/uuidv4.min.js"></script>
+```
+
+These CDNs all provide the same [`uuidv4()`](#uuidv4options-buffer-offset) method:
+
+```html
+<script>
+ uuidv4(); // ⇨ '55af1e37-0734-46d8-b070-a1e42e4fc392'
+</script>
+```
+
+Methods for the other algorithms ([`uuidv1()`](#uuidv1options-buffer-offset), [`uuidv3()`](#uuidv3name-namespace-buffer-offset) and [`uuidv5()`](#uuidv5name-namespace-buffer-offset)) are available from the files `uuidv1.min.js`, `uuidv3.min.js` and `uuidv5.min.js` respectively.
+
+## "getRandomValues() not supported"
+
+This error occurs in environments where the standard [`crypto.getRandomValues()`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto/getRandomValues) API is not supported. This issue can be resolved by adding an appropriate polyfill:
+
+### React Native / Expo
+
+1. Install [`react-native-get-random-values`](https://github.com/LinusU/react-native-get-random-values#readme)
+1. Import it _before_ `uuid`. Since `uuid` might also appear as a transitive dependency of some other imports, it's safest to import `react-native-get-random-values` as the very first thing in your entry point:
+
+```javascript
+import 'react-native-get-random-values';
+import { v4 as uuidv4 } from 'uuid';
+```
+
+Note: If you are using Expo, you must be using at least `react-native-get-random-values@1.5.0` and `expo@39.0.0`.
+
+### Web Workers / Service Workers (Edge <= 18)
+
+[In Edge <= 18, Web Crypto is not supported in Web Workers or Service Workers](https://caniuse.com/#feat=cryptography) and we are not aware of a polyfill (let us know if you find one, please).
+
+## Upgrading From `uuid@7.x`
+
+### Only Named Exports Supported When Using with Node.js ESM
+
+`uuid@7.x` did not come with native ECMAScript Module (ESM) support for Node.js. Importing it in Node.js ESM consequently imported the CommonJS source with a default export. This library now comes with true Node.js ESM support and only provides named exports.
+
+Instead of doing:
+
+```javascript
+import uuid from 'uuid';
+uuid.v4();
+```
+
+you will now have to use the named exports:
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+uuidv4();
+```
+
+### Deep Requires No Longer Supported
+
+Deep requires like `require('uuid/v4')` [which have been deprecated in `uuid@7.x`](#deep-requires-now-deprecated) are no longer supported.
+
+## Upgrading From `uuid@3.x`
+
+"_Wait... what happened to `uuid@4.x` - `uuid@6.x`?!?_"
+
+In order to avoid confusion with RFC [version 4](#uuidv4options-buffer-offset) and [version 5](#uuidv5name-namespace-buffer-offset) UUIDs, and a possible [version 6](http://gh.peabody.io/uuidv6/), releases 4 through 6 of this module have been skipped.
+
+### Deep Requires Now Deprecated
+
+`uuid@3.x` encouraged the use of deep requires to minimize the bundle size of browser builds:
+
+```javascript
+const uuidv4 = require('uuid/v4'); // <== NOW DEPRECATED!
+uuidv4();
+```
+
+As of `uuid@7.x` this library provides ECMAScript Modules (ESM) builds, which allow packagers like Webpack and Rollup to do "tree-shaking" to remove dead code. Instead, use the `import` syntax:
+
+```javascript
+import { v4 as uuidv4 } from 'uuid';
+uuidv4();
+```
+
+... or for CommonJS:
+
+```javascript
+const { v4: uuidv4 } = require('uuid');
+uuidv4();
+```
+
+### Default Export Removed
+
+`uuid@3.x` was exporting the Version 4 UUID method as a default export:
+
+```javascript
+const uuid = require('uuid'); // <== REMOVED!
+```
+
+This usage pattern was already discouraged in `uuid@3.x` and has been removed in `uuid@7.x`.
+
+----
+Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file
diff --git a/node_modules/uuid/dist/bin/uuid b/node_modules/uuid/dist/bin/uuid
new file mode 100755
index 0000000..f38d2ee
--- /dev/null
+++ b/node_modules/uuid/dist/bin/uuid
@@ -0,0 +1,2 @@
+#!/usr/bin/env node
+require('../uuid-bin');
diff --git a/node_modules/uuid/dist/esm-browser/index.js b/node_modules/uuid/dist/esm-browser/index.js
new file mode 100644
index 0000000..1db6f6d
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/index.js
@@ -0,0 +1,9 @@
+export { default as v1 } from './v1.js';
+export { default as v3 } from './v3.js';
+export { default as v4 } from './v4.js';
+export { default as v5 } from './v5.js';
+export { default as NIL } from './nil.js';
+export { default as version } from './version.js';
+export { default as validate } from './validate.js';
+export { default as stringify } from './stringify.js';
+export { default as parse } from './parse.js'; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/md5.js b/node_modules/uuid/dist/esm-browser/md5.js
new file mode 100644
index 0000000..8b5d46a
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/md5.js
@@ -0,0 +1,215 @@
+/*
+ * Browser-compatible JavaScript MD5
+ *
+ * Modification of JavaScript MD5
+ * https://github.com/blueimp/JavaScript-MD5
+ *
+ * Copyright 2011, Sebastian Tschan
+ * https://blueimp.net
+ *
+ * Licensed under the MIT license:
+ * https://opensource.org/licenses/MIT
+ *
+ * Based on
+ * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
+ * Digest Algorithm, as defined in RFC 1321.
+ * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
+ * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
+ * Distributed under the BSD License
+ * See http://pajhome.org.uk/crypt/md5 for more info.
+ */
+function md5(bytes) {
+ if (typeof bytes === 'string') {
+ var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
+
+ bytes = new Uint8Array(msg.length);
+
+ for (var i = 0; i < msg.length; ++i) {
+ bytes[i] = msg.charCodeAt(i);
+ }
+ }
+
+ return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8));
+}
+/*
+ * Convert an array of little-endian words to an array of bytes
+ */
+
+
+function md5ToHexEncodedArray(input) {
+ var output = [];
+ var length32 = input.length * 32;
+ var hexTab = '0123456789abcdef';
+
+ for (var i = 0; i < length32; i += 8) {
+ var x = input[i >> 5] >>> i % 32 & 0xff;
+ var hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16);
+ output.push(hex);
+ }
+
+ return output;
+}
+/**
+ * Calculate output length with padding and bit length
+ */
+
+
+function getOutputLength(inputLength8) {
+ return (inputLength8 + 64 >>> 9 << 4) + 14 + 1;
+}
+/*
+ * Calculate the MD5 of an array of little-endian words, and a bit length.
+ */
+
+
+function wordsToMd5(x, len) {
+ /* append padding */
+ x[len >> 5] |= 0x80 << len % 32;
+ x[getOutputLength(len) - 1] = len;
+ var a = 1732584193;
+ var b = -271733879;
+ var c = -1732584194;
+ var d = 271733878;
+
+ for (var i = 0; i < x.length; i += 16) {
+ var olda = a;
+ var oldb = b;
+ var oldc = c;
+ var oldd = d;
+ a = md5ff(a, b, c, d, x[i], 7, -680876936);
+ d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
+ c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
+ b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
+ a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
+ d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
+ c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
+ b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
+ a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
+ d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
+ c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
+ b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
+ a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
+ d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
+ c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
+ b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
+ a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
+ d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
+ c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
+ b = md5gg(b, c, d, a, x[i], 20, -373897302);
+ a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
+ d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
+ c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
+ b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
+ a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
+ d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
+ c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
+ b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
+ a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
+ d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
+ c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
+ b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
+ a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
+ d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
+ c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
+ b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
+ a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
+ d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
+ c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
+ b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
+ a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
+ d = md5hh(d, a, b, c, x[i], 11, -358537222);
+ c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
+ b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
+ a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
+ d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
+ c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
+ b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
+ a = md5ii(a, b, c, d, x[i], 6, -198630844);
+ d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
+ c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
+ b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
+ a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
+ d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
+ c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
+ b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
+ a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
+ d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
+ c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
+ b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
+ a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
+ d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
+ c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
+ b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
+ a = safeAdd(a, olda);
+ b = safeAdd(b, oldb);
+ c = safeAdd(c, oldc);
+ d = safeAdd(d, oldd);
+ }
+
+ return [a, b, c, d];
+}
+/*
+ * Convert an array of bytes to an array of little-endian words
+ * Characters >255 have their high-byte silently ignored.
+ */
+
+
+function bytesToWords(input) {
+ if (input.length === 0) {
+ return [];
+ }
+
+ var length8 = input.length * 8;
+ var output = new Uint32Array(getOutputLength(length8));
+
+ for (var i = 0; i < length8; i += 8) {
+ output[i >> 5] |= (input[i / 8] & 0xff) << i % 32;
+ }
+
+ return output;
+}
+/*
+ * Add integers, wrapping at 2^32. This uses 16-bit operations internally
+ * to work around bugs in some JS interpreters.
+ */
+
+
+function safeAdd(x, y) {
+ var lsw = (x & 0xffff) + (y & 0xffff);
+ var msw = (x >> 16) + (y >> 16) + (lsw >> 16);
+ return msw << 16 | lsw & 0xffff;
+}
+/*
+ * Bitwise rotate a 32-bit number to the left.
+ */
+
+
+function bitRotateLeft(num, cnt) {
+ return num << cnt | num >>> 32 - cnt;
+}
+/*
+ * These functions implement the four basic operations the algorithm uses.
+ */
+
+
+function md5cmn(q, a, b, x, s, t) {
+ return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
+}
+
+function md5ff(a, b, c, d, x, s, t) {
+ return md5cmn(b & c | ~b & d, a, b, x, s, t);
+}
+
+function md5gg(a, b, c, d, x, s, t) {
+ return md5cmn(b & d | c & ~d, a, b, x, s, t);
+}
+
+function md5hh(a, b, c, d, x, s, t) {
+ return md5cmn(b ^ c ^ d, a, b, x, s, t);
+}
+
+function md5ii(a, b, c, d, x, s, t) {
+ return md5cmn(c ^ (b | ~d), a, b, x, s, t);
+}
+
+export default md5; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/nil.js b/node_modules/uuid/dist/esm-browser/nil.js
new file mode 100644
index 0000000..b36324c
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/nil.js
@@ -0,0 +1 @@
+export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/parse.js b/node_modules/uuid/dist/esm-browser/parse.js
new file mode 100644
index 0000000..7c5b1d5
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/parse.js
@@ -0,0 +1,35 @@
+import validate from './validate.js';
+
+function parse(uuid) {
+ if (!validate(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ var v;
+ var arr = new Uint8Array(16); // Parse ########-....-....-....-............
+
+ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
+ arr[1] = v >>> 16 & 0xff;
+ arr[2] = v >>> 8 & 0xff;
+ arr[3] = v & 0xff; // Parse ........-####-....-....-............
+
+ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
+ arr[5] = v & 0xff; // Parse ........-....-####-....-............
+
+ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
+ arr[7] = v & 0xff; // Parse ........-....-....-####-............
+
+ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
+ arr[9] = v & 0xff; // Parse ........-....-....-....-############
+ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
+
+ arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
+ arr[11] = v / 0x100000000 & 0xff;
+ arr[12] = v >>> 24 & 0xff;
+ arr[13] = v >>> 16 & 0xff;
+ arr[14] = v >>> 8 & 0xff;
+ arr[15] = v & 0xff;
+ return arr;
+}
+
+export default parse; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/regex.js b/node_modules/uuid/dist/esm-browser/regex.js
new file mode 100644
index 0000000..3da8673
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/regex.js
@@ -0,0 +1 @@
+export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/rng.js b/node_modules/uuid/dist/esm-browser/rng.js
new file mode 100644
index 0000000..8abbf2e
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/rng.js
@@ -0,0 +1,19 @@
+// Unique ID creation requires a high-quality random number generator. In the browser we therefore
+// require the crypto API and do not support built-in fallback to lower quality random number
+// generators (like Math.random()).
+var getRandomValues;
+var rnds8 = new Uint8Array(16);
+export default function rng() {
+ // lazy load so that environments that need to polyfill have a chance to do so
+ if (!getRandomValues) {
+ // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also,
+ // find the complete implementation of crypto (msCrypto) on IE11.
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);
+
+ if (!getRandomValues) {
+ throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
+ }
+ }
+
+ return getRandomValues(rnds8);
+} \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/sha1.js b/node_modules/uuid/dist/esm-browser/sha1.js
new file mode 100644
index 0000000..940548b
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/sha1.js
@@ -0,0 +1,96 @@
+// Adapted from Chris Veness' SHA1 code at
+// http://www.movable-type.co.uk/scripts/sha1.html
+function f(s, x, y, z) {
+ switch (s) {
+ case 0:
+ return x & y ^ ~x & z;
+
+ case 1:
+ return x ^ y ^ z;
+
+ case 2:
+ return x & y ^ x & z ^ y & z;
+
+ case 3:
+ return x ^ y ^ z;
+ }
+}
+
+function ROTL(x, n) {
+ return x << n | x >>> 32 - n;
+}
+
+function sha1(bytes) {
+ var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
+ var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
+
+ if (typeof bytes === 'string') {
+ var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
+
+ bytes = [];
+
+ for (var i = 0; i < msg.length; ++i) {
+ bytes.push(msg.charCodeAt(i));
+ }
+ } else if (!Array.isArray(bytes)) {
+ // Convert Array-like to Array
+ bytes = Array.prototype.slice.call(bytes);
+ }
+
+ bytes.push(0x80);
+ var l = bytes.length / 4 + 2;
+ var N = Math.ceil(l / 16);
+ var M = new Array(N);
+
+ for (var _i = 0; _i < N; ++_i) {
+ var arr = new Uint32Array(16);
+
+ for (var j = 0; j < 16; ++j) {
+ arr[j] = bytes[_i * 64 + j * 4] << 24 | bytes[_i * 64 + j * 4 + 1] << 16 | bytes[_i * 64 + j * 4 + 2] << 8 | bytes[_i * 64 + j * 4 + 3];
+ }
+
+ M[_i] = arr;
+ }
+
+ M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
+ M[N - 1][14] = Math.floor(M[N - 1][14]);
+ M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;
+
+ for (var _i2 = 0; _i2 < N; ++_i2) {
+ var W = new Uint32Array(80);
+
+ for (var t = 0; t < 16; ++t) {
+ W[t] = M[_i2][t];
+ }
+
+ for (var _t = 16; _t < 80; ++_t) {
+ W[_t] = ROTL(W[_t - 3] ^ W[_t - 8] ^ W[_t - 14] ^ W[_t - 16], 1);
+ }
+
+ var a = H[0];
+ var b = H[1];
+ var c = H[2];
+ var d = H[3];
+ var e = H[4];
+
+ for (var _t2 = 0; _t2 < 80; ++_t2) {
+ var s = Math.floor(_t2 / 20);
+ var T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[_t2] >>> 0;
+ e = d;
+ d = c;
+ c = ROTL(b, 30) >>> 0;
+ b = a;
+ a = T;
+ }
+
+ H[0] = H[0] + a >>> 0;
+ H[1] = H[1] + b >>> 0;
+ H[2] = H[2] + c >>> 0;
+ H[3] = H[3] + d >>> 0;
+ H[4] = H[4] + e >>> 0;
+ }
+
+ return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
+}
+
+export default sha1; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/stringify.js b/node_modules/uuid/dist/esm-browser/stringify.js
new file mode 100644
index 0000000..3102111
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/stringify.js
@@ -0,0 +1,30 @@
+import validate from './validate.js';
+/**
+ * Convert array of 16 byte values to UUID string format of the form:
+ * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+ */
+
+var byteToHex = [];
+
+for (var i = 0; i < 256; ++i) {
+ byteToHex.push((i + 0x100).toString(16).substr(1));
+}
+
+function stringify(arr) {
+ var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
+ // Note: Be careful editing this code! It's been tuned for performance
+ // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
+ var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
+ // of the following:
+ // - One or more input array values don't map to a hex octet (leading to
+ // "undefined" in the uuid)
+ // - Invalid input values for the RFC `version` or `variant` fields
+
+ if (!validate(uuid)) {
+ throw TypeError('Stringified UUID is invalid');
+ }
+
+ return uuid;
+}
+
+export default stringify; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/v1.js b/node_modules/uuid/dist/esm-browser/v1.js
new file mode 100644
index 0000000..1a22591
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/v1.js
@@ -0,0 +1,95 @@
+import rng from './rng.js';
+import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+
+var _nodeId;
+
+var _clockseq; // Previous uuid creation time
+
+
+var _lastMSecs = 0;
+var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
+
+function v1(options, buf, offset) {
+ var i = buf && offset || 0;
+ var b = buf || new Array(16);
+ options = options || {};
+ var node = options.node || _nodeId;
+ var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189
+
+ if (node == null || clockseq == null) {
+ var seedBytes = options.random || (options.rng || rng)();
+
+ if (node == null) {
+ // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
+ node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
+ }
+
+ if (clockseq == null) {
+ // Per 4.2.2, randomize (14 bit) clockseq
+ clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
+ }
+ } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+ // (1582-10-15 00:00). JS numbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
+
+
+ var msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
+
+ var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
+
+ var dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
+
+ if (dt < 0 && options.clockseq === undefined) {
+ clockseq = clockseq + 1 & 0x3fff;
+ } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
+
+
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
+ nsecs = 0;
+ } // Per 4.2.1.2 Throw error if too many uuids are requested
+
+
+ if (nsecs >= 10000) {
+ throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
+ }
+
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
+
+ msecs += 12219292800000; // `time_low`
+
+ var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff; // `time_mid`
+
+ var tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff; // `time_high_and_version`
+
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
+
+ b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
+
+ b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
+
+ b[i++] = clockseq & 0xff; // `node`
+
+ for (var n = 0; n < 6; ++n) {
+ b[i + n] = node[n];
+ }
+
+ return buf || stringify(b);
+}
+
+export default v1; \ No newline at end of file
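
The arithmetic above packs a 60-bit count of 100-nanosecond intervals since 1582-10-15 into time_low / time_mid / time_high: msecs is shifted to the Gregorian epoch by adding 12219292800000, scaled by 10000, and nsecs supplies the sub-millisecond part. Because every input can be supplied via options, v1() can be made reproducible, which is handy in tests; the values below are arbitrary and purely illustrative:

    import v1 from './v1.js';

    var uuid = v1({
      node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], // 48-bit node id
      clockseq: 0x1234,                           // 14-bit clock sequence
      msecs: Date.UTC(2022, 3, 9),                // ms since the Unix epoch
      nsecs: 5678                                 // 100-ns intervals within that ms
    });

    // With node, clockseq, msecs and nsecs all pinned, no entropy is drawn
    // and the same options always yield the same UUID string.
    console.log(uuid);
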
diff --git a/node_modules/uuid/dist/esm-browser/v3.js b/node_modules/uuid/dist/esm-browser/v3.js
new file mode 100644
index 0000000..c9ab9a4
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/v3.js
@@ -0,0 +1,4 @@
+import v35 from './v35.js';
+import md5 from './md5.js';
+var v3 = v35('v3', 0x30, md5);
+export default v3; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/v35.js b/node_modules/uuid/dist/esm-browser/v35.js
new file mode 100644
index 0000000..31dd8a1
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/v35.js
@@ -0,0 +1,64 @@
+import stringify from './stringify.js';
+import parse from './parse.js';
+
+function stringToBytes(str) {
+ str = unescape(encodeURIComponent(str)); // UTF8 escape
+
+ var bytes = [];
+
+ for (var i = 0; i < str.length; ++i) {
+ bytes.push(str.charCodeAt(i));
+ }
+
+ return bytes;
+}
+
+export var DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
+export var URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
+export default function (name, version, hashfunc) {
+ function generateUUID(value, namespace, buf, offset) {
+ if (typeof value === 'string') {
+ value = stringToBytes(value);
+ }
+
+ if (typeof namespace === 'string') {
+ namespace = parse(namespace);
+ }
+
+ if (namespace.length !== 16) {
+ throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
+ } // Compute hash of namespace and value, Per 4.3
+ // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
+ // hashfunc([...namespace, ... value])`
+
+
+ var bytes = new Uint8Array(16 + value.length);
+ bytes.set(namespace);
+ bytes.set(value, namespace.length);
+ bytes = hashfunc(bytes);
+ bytes[6] = bytes[6] & 0x0f | version;
+ bytes[8] = bytes[8] & 0x3f | 0x80;
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (var i = 0; i < 16; ++i) {
+ buf[offset + i] = bytes[i];
+ }
+
+ return buf;
+ }
+
+ return stringify(bytes);
+ } // Function#name is not settable on some platforms (#270)
+
+
+ try {
+ generateUUID.name = name; // eslint-disable-next-line no-empty
+ } catch (err) {} // For CommonJS default export support
+
+
+ generateUUID.DNS = DNS;
+ generateUUID.URL = URL;
+ return generateUUID;
+} \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/v4.js b/node_modules/uuid/dist/esm-browser/v4.js
new file mode 100644
index 0000000..404810a
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/v4.js
@@ -0,0 +1,24 @@
+import rng from './rng.js';
+import stringify from './stringify.js';
+
+function v4(options, buf, offset) {
+ options = options || {};
+ var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+
+ rnds[6] = rnds[6] & 0x0f | 0x40;
+ rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (var i = 0; i < 16; ++i) {
+ buf[offset + i] = rnds[i];
+ }
+
+ return buf;
+ }
+
+ return stringify(rnds);
+}
+
+export default v4; \ No newline at end of file
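
v4() is the simplest generator: 16 random bytes with the version nibble forced to 4 and the variant bits to 10x. Supplying options.random makes the output deterministic, which the masking makes easy to verify by hand:

    import v4 from './v4.js';

    // All-zero input: byte 6 becomes 0x40 (version 4), byte 8 becomes 0x80 (variant).
    console.log(v4({ random: new Uint8Array(16) }));
    // -> '00000000-0000-4000-8000-000000000000'

    // Passing a buffer skips string building and fills the bytes in place.
    var buf = new Uint8Array(16);
    v4({}, buf); // returns buf
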
diff --git a/node_modules/uuid/dist/esm-browser/v5.js b/node_modules/uuid/dist/esm-browser/v5.js
new file mode 100644
index 0000000..c08d96b
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/v5.js
@@ -0,0 +1,4 @@
+import v35 from './v35.js';
+import sha1 from './sha1.js';
+var v5 = v35('v5', 0x50, sha1);
+export default v5; \ No newline at end of file
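
v3 and v5 are just the shared v35 factory above bound to a version nibble and a hash function (MD5 or SHA-1). Name-based UUIDs are deterministic: hashing the same name in the same namespace always gives the same UUID. A short sketch using the DNS namespace the factory exposes (the custom namespace string below is an arbitrary, illustrative value):

    import v5 from './v5.js';

    var a = v5('example.org', v5.DNS);
    var b = v5('example.org', v5.DNS);
    console.log(a === b); // true — same name, same namespace, same UUID

    // Any valid UUID string (or 16-byte array) works as a namespace.
    var MY_NAMESPACE = '12345678-9abc-4def-8123-456789abcdef';
    console.log(v5('some-name', MY_NAMESPACE));
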
diff --git a/node_modules/uuid/dist/esm-browser/validate.js b/node_modules/uuid/dist/esm-browser/validate.js
new file mode 100644
index 0000000..f1cdc7a
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/validate.js
@@ -0,0 +1,7 @@
+import REGEX from './regex.js';
+
+function validate(uuid) {
+ return typeof uuid === 'string' && REGEX.test(uuid);
+}
+
+export default validate; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-browser/version.js b/node_modules/uuid/dist/esm-browser/version.js
new file mode 100644
index 0000000..77530e9
--- /dev/null
+++ b/node_modules/uuid/dist/esm-browser/version.js
@@ -0,0 +1,11 @@
+import validate from './validate.js';
+
+function version(uuid) {
+ if (!validate(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ return parseInt(uuid.substr(14, 1), 16);
+}
+
+export default version; \ No newline at end of file
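
version() relies on the fixed layout of the string form: the character at index 14 is the first hex digit of the third group, i.e. the version nibble, so a single substr/parseInt suffices once validate() has vetted the shape. For example:

    import validate from './validate.js';
    import version from './version.js';

    console.log(validate('00000000-0000-0000-0000-000000000000')); // true (NIL is special-cased)
    console.log(version('00000000-0000-0000-0000-000000000000'));  // 0
    console.log(version('12345678-9abc-4def-8123-456789abcdef'));  // 4 (illustrative value)
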
diff --git a/node_modules/uuid/dist/esm-node/index.js b/node_modules/uuid/dist/esm-node/index.js
new file mode 100644
index 0000000..1db6f6d
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/index.js
@@ -0,0 +1,9 @@
+export { default as v1 } from './v1.js';
+export { default as v3 } from './v3.js';
+export { default as v4 } from './v4.js';
+export { default as v5 } from './v5.js';
+export { default as NIL } from './nil.js';
+export { default as version } from './version.js';
+export { default as validate } from './validate.js';
+export { default as stringify } from './stringify.js';
+export { default as parse } from './parse.js'; \ No newline at end of file
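
This barrel is the ESM entry point for Node; assuming the package's "module"/"exports" fields (not part of this hunk) resolve here, the usual named imports all come through it:

    import { v4, NIL, parse, stringify } from 'uuid';

    console.log(NIL);                           // '00000000-0000-0000-0000-000000000000'
    console.log(stringify(parse(v4())).length); // 36 — parse and stringify round-trip
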
diff --git a/node_modules/uuid/dist/esm-node/md5.js b/node_modules/uuid/dist/esm-node/md5.js
new file mode 100644
index 0000000..4d68b04
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/md5.js
@@ -0,0 +1,13 @@
+import crypto from 'crypto';
+
+function md5(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
+ }
+
+ return crypto.createHash('md5').update(bytes).digest();
+}
+
+export default md5; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/nil.js b/node_modules/uuid/dist/esm-node/nil.js
new file mode 100644
index 0000000..b36324c
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/nil.js
@@ -0,0 +1 @@
+export default '00000000-0000-0000-0000-000000000000'; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/parse.js b/node_modules/uuid/dist/esm-node/parse.js
new file mode 100644
index 0000000..6421c5d
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/parse.js
@@ -0,0 +1,35 @@
+import validate from './validate.js';
+
+function parse(uuid) {
+ if (!validate(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ let v;
+ const arr = new Uint8Array(16); // Parse ########-....-....-....-............
+
+ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
+ arr[1] = v >>> 16 & 0xff;
+ arr[2] = v >>> 8 & 0xff;
+ arr[3] = v & 0xff; // Parse ........-####-....-....-............
+
+ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
+ arr[5] = v & 0xff; // Parse ........-....-####-....-............
+
+ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
+ arr[7] = v & 0xff; // Parse ........-....-....-####-............
+
+ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
+ arr[9] = v & 0xff; // Parse ........-....-....-....-############
+ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
+
+ arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
+ arr[11] = v / 0x100000000 & 0xff;
+ arr[12] = v >>> 24 & 0xff;
+ arr[13] = v >>> 16 & 0xff;
+ arr[14] = v >>> 8 & 0xff;
+ arr[15] = v & 0xff;
+ return arr;
+}
+
+export default parse; \ No newline at end of file
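
parse() extracts the five dash-separated groups with parseInt and splits them into bytes; the final 48-bit group is divided by 0x10000000000 and 0x100000000 for its top two bytes because JavaScript bitwise operators would truncate it to 32 bits. It is the exact inverse of stringify():

    import parse from './parse.js';
    import stringify from './stringify.js';

    const uuid = '12345678-9abc-4def-8123-456789abcdef'; // illustrative value
    const bytes = parse(uuid);              // Uint8Array(16)
    console.log(bytes[0].toString(16));     // '12'
    console.log(stringify(bytes) === uuid); // true
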
diff --git a/node_modules/uuid/dist/esm-node/regex.js b/node_modules/uuid/dist/esm-node/regex.js
new file mode 100644
index 0000000..3da8673
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/regex.js
@@ -0,0 +1 @@
+export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; \ No newline at end of file
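
This single pattern does all of the package's validation: fixed-width hex groups, a version nibble restricted to 1-5, a variant digit restricted to 8/9/a/b, plus an explicit alternative for the all-zero NIL UUID. A few probes make the constraints visible:

    import REGEX from './regex.js';

    console.log(REGEX.test('12345678-9abc-4def-8123-456789abcdef')); // true  (version 4, variant 8)
    console.log(REGEX.test('12345678-9abc-6def-8123-456789abcdef')); // false (version 6 not in 1-5)
    console.log(REGEX.test('00000000-0000-0000-0000-000000000000')); // true  (NIL special case)
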
diff --git a/node_modules/uuid/dist/esm-node/rng.js b/node_modules/uuid/dist/esm-node/rng.js
new file mode 100644
index 0000000..8006244
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/rng.js
@@ -0,0 +1,12 @@
+import crypto from 'crypto';
+const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
+
+let poolPtr = rnds8Pool.length;
+export default function rng() {
+ if (poolPtr > rnds8Pool.length - 16) {
+ crypto.randomFillSync(rnds8Pool);
+ poolPtr = 0;
+ }
+
+ return rnds8Pool.slice(poolPtr, poolPtr += 16);
+} \ No newline at end of file
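
Unlike the browser build, the Node rng amortizes the cost of the system call: a 256-byte pool is refilled with crypto.randomFillSync whenever fewer than 16 bytes remain, and each call hands out a 16-byte copy. Because slice() copies, bytes already returned are unaffected by later refills:

    import rng from './rng.js';

    const a = rng();
    const b = rng();
    console.log(a.length, b.length);    // 16 16
    console.log(a.buffer === b.buffer); // false — each call returns its own copy
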
diff --git a/node_modules/uuid/dist/esm-node/sha1.js b/node_modules/uuid/dist/esm-node/sha1.js
new file mode 100644
index 0000000..e23850b
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/sha1.js
@@ -0,0 +1,13 @@
+import crypto from 'crypto';
+
+function sha1(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
+ }
+
+ return crypto.createHash('sha1').update(bytes).digest();
+}
+
+export default sha1; \ No newline at end of file
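
md5.js and sha1.js in the esm-node build are thin wrappers over Node's crypto.createHash; they only normalize plain arrays and strings into Buffers first. The wrapper is therefore interchangeable with a direct createHash call:

    import { createHash } from 'crypto';
    import sha1 from './sha1.js';

    const viaWrapper = sha1('hello'); // 20-byte Buffer
    const direct = createHash('sha1').update('hello', 'utf8').digest();
    console.log(viaWrapper.equals(direct)); // true
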
diff --git a/node_modules/uuid/dist/esm-node/stringify.js b/node_modules/uuid/dist/esm-node/stringify.js
new file mode 100644
index 0000000..f9bca12
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/stringify.js
@@ -0,0 +1,29 @@
+import validate from './validate.js';
+/**
+ * Convert array of 16 byte values to UUID string format of the form:
+ * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+ */
+
+const byteToHex = [];
+
+for (let i = 0; i < 256; ++i) {
+ byteToHex.push((i + 0x100).toString(16).substr(1));
+}
+
+function stringify(arr, offset = 0) {
+ // Note: Be careful editing this code! It's been tuned for performance
+ // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
+ const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
+ // of the following:
+ // - One or more input array values don't map to a hex octet (leading to
+ // "undefined" in the uuid)
+ // - Invalid input values for the RFC `version` or `variant` fields
+
+ if (!validate(uuid)) {
+ throw TypeError('Stringified UUID is invalid');
+ }
+
+ return uuid;
+}
+
+export default stringify; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/v1.js b/node_modules/uuid/dist/esm-node/v1.js
new file mode 100644
index 0000000..ebf81ac
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/v1.js
@@ -0,0 +1,95 @@
+import rng from './rng.js';
+import stringify from './stringify.js'; // **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+
+let _nodeId;
+
+let _clockseq; // Previous uuid creation time
+
+
+let _lastMSecs = 0;
+let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
+
+function v1(options, buf, offset) {
+ let i = buf && offset || 0;
+ const b = buf || new Array(16);
+ options = options || {};
+ let node = options.node || _nodeId;
+ let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189
+
+ if (node == null || clockseq == null) {
+ const seedBytes = options.random || (options.rng || rng)();
+
+ if (node == null) {
+      // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)
+ node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
+ }
+
+ if (clockseq == null) {
+ // Per 4.2.2, randomize (14 bit) clockseq
+ clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
+ }
+ } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+  // (1582-10-15 00:00). JS numbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
+
+
+ let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
+
+ let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
+
+ const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
+
+ if (dt < 0 && options.clockseq === undefined) {
+ clockseq = clockseq + 1 & 0x3fff;
+ } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
+
+
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
+ nsecs = 0;
+ } // Per 4.2.1.2 Throw error if too many uuids are requested
+
+
+ if (nsecs >= 10000) {
+ throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
+ }
+
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
+
+ msecs += 12219292800000; // `time_low`
+
+ const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff; // `time_mid`
+
+ const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff; // `time_high_and_version`
+
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
+
+ b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
+
+ b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
+
+ b[i++] = clockseq & 0xff; // `node`
+
+ for (let n = 0; n < 6; ++n) {
+ b[i + n] = node[n];
+ }
+
+ return buf || stringify(b);
+}
+
+export default v1; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/v3.js b/node_modules/uuid/dist/esm-node/v3.js
new file mode 100644
index 0000000..09063b8
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/v3.js
@@ -0,0 +1,4 @@
+import v35 from './v35.js';
+import md5 from './md5.js';
+const v3 = v35('v3', 0x30, md5);
+export default v3; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/v35.js b/node_modules/uuid/dist/esm-node/v35.js
new file mode 100644
index 0000000..22f6a19
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/v35.js
@@ -0,0 +1,64 @@
+import stringify from './stringify.js';
+import parse from './parse.js';
+
+function stringToBytes(str) {
+ str = unescape(encodeURIComponent(str)); // UTF8 escape
+
+ const bytes = [];
+
+ for (let i = 0; i < str.length; ++i) {
+ bytes.push(str.charCodeAt(i));
+ }
+
+ return bytes;
+}
+
+export const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
+export const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
+export default function (name, version, hashfunc) {
+ function generateUUID(value, namespace, buf, offset) {
+ if (typeof value === 'string') {
+ value = stringToBytes(value);
+ }
+
+ if (typeof namespace === 'string') {
+ namespace = parse(namespace);
+ }
+
+ if (namespace.length !== 16) {
+ throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
+ } // Compute hash of namespace and value, Per 4.3
+ // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
+ // hashfunc([...namespace, ... value])`
+
+
+ let bytes = new Uint8Array(16 + value.length);
+ bytes.set(namespace);
+ bytes.set(value, namespace.length);
+ bytes = hashfunc(bytes);
+ bytes[6] = bytes[6] & 0x0f | version;
+ bytes[8] = bytes[8] & 0x3f | 0x80;
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (let i = 0; i < 16; ++i) {
+ buf[offset + i] = bytes[i];
+ }
+
+ return buf;
+ }
+
+ return stringify(bytes);
+ } // Function#name is not settable on some platforms (#270)
+
+
+ try {
+ generateUUID.name = name; // eslint-disable-next-line no-empty
+ } catch (err) {} // For CommonJS default export support
+
+
+ generateUUID.DNS = DNS;
+ generateUUID.URL = URL;
+ return generateUUID;
+} \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/v4.js b/node_modules/uuid/dist/esm-node/v4.js
new file mode 100644
index 0000000..efad926
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/v4.js
@@ -0,0 +1,24 @@
+import rng from './rng.js';
+import stringify from './stringify.js';
+
+function v4(options, buf, offset) {
+ options = options || {};
+ const rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+
+ rnds[6] = rnds[6] & 0x0f | 0x40;
+ rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (let i = 0; i < 16; ++i) {
+ buf[offset + i] = rnds[i];
+ }
+
+ return buf;
+ }
+
+ return stringify(rnds);
+}
+
+export default v4; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/v5.js b/node_modules/uuid/dist/esm-node/v5.js
new file mode 100644
index 0000000..e87fe31
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/v5.js
@@ -0,0 +1,4 @@
+import v35 from './v35.js';
+import sha1 from './sha1.js';
+const v5 = v35('v5', 0x50, sha1);
+export default v5; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/validate.js b/node_modules/uuid/dist/esm-node/validate.js
new file mode 100644
index 0000000..f1cdc7a
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/validate.js
@@ -0,0 +1,7 @@
+import REGEX from './regex.js';
+
+function validate(uuid) {
+ return typeof uuid === 'string' && REGEX.test(uuid);
+}
+
+export default validate; \ No newline at end of file
diff --git a/node_modules/uuid/dist/esm-node/version.js b/node_modules/uuid/dist/esm-node/version.js
new file mode 100644
index 0000000..77530e9
--- /dev/null
+++ b/node_modules/uuid/dist/esm-node/version.js
@@ -0,0 +1,11 @@
+import validate from './validate.js';
+
+function version(uuid) {
+ if (!validate(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ return parseInt(uuid.substr(14, 1), 16);
+}
+
+export default version; \ No newline at end of file
diff --git a/node_modules/uuid/dist/index.js b/node_modules/uuid/dist/index.js
new file mode 100644
index 0000000..bf13b10
--- /dev/null
+++ b/node_modules/uuid/dist/index.js
@@ -0,0 +1,79 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+Object.defineProperty(exports, "v1", {
+ enumerable: true,
+ get: function () {
+ return _v.default;
+ }
+});
+Object.defineProperty(exports, "v3", {
+ enumerable: true,
+ get: function () {
+ return _v2.default;
+ }
+});
+Object.defineProperty(exports, "v4", {
+ enumerable: true,
+ get: function () {
+ return _v3.default;
+ }
+});
+Object.defineProperty(exports, "v5", {
+ enumerable: true,
+ get: function () {
+ return _v4.default;
+ }
+});
+Object.defineProperty(exports, "NIL", {
+ enumerable: true,
+ get: function () {
+ return _nil.default;
+ }
+});
+Object.defineProperty(exports, "version", {
+ enumerable: true,
+ get: function () {
+ return _version.default;
+ }
+});
+Object.defineProperty(exports, "validate", {
+ enumerable: true,
+ get: function () {
+ return _validate.default;
+ }
+});
+Object.defineProperty(exports, "stringify", {
+ enumerable: true,
+ get: function () {
+ return _stringify.default;
+ }
+});
+Object.defineProperty(exports, "parse", {
+ enumerable: true,
+ get: function () {
+ return _parse.default;
+ }
+});
+
+var _v = _interopRequireDefault(require("./v1.js"));
+
+var _v2 = _interopRequireDefault(require("./v3.js"));
+
+var _v3 = _interopRequireDefault(require("./v4.js"));
+
+var _v4 = _interopRequireDefault(require("./v5.js"));
+
+var _nil = _interopRequireDefault(require("./nil.js"));
+
+var _version = _interopRequireDefault(require("./version.js"));
+
+var _validate = _interopRequireDefault(require("./validate.js"));
+
+var _stringify = _interopRequireDefault(require("./stringify.js"));
+
+var _parse = _interopRequireDefault(require("./parse.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } \ No newline at end of file
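
dist/index.js is the transpiled CommonJS entry point: each Object.defineProperty getter re-exports the default export of the corresponding module, so CommonJS consumers see the same named API as the ESM builds. Assuming the package's "main" field (not shown in this hunk) points here:

    // CommonJS usage of the package entry point.
    const { v4: uuidv4, validate, version } = require('uuid');

    const id = uuidv4();
    console.log(validate(id)); // true
    console.log(version(id));  // 4
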
diff --git a/node_modules/uuid/dist/md5-browser.js b/node_modules/uuid/dist/md5-browser.js
new file mode 100644
index 0000000..7a4582a
--- /dev/null
+++ b/node_modules/uuid/dist/md5-browser.js
@@ -0,0 +1,223 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+/*
+ * Browser-compatible JavaScript MD5
+ *
+ * Modification of JavaScript MD5
+ * https://github.com/blueimp/JavaScript-MD5
+ *
+ * Copyright 2011, Sebastian Tschan
+ * https://blueimp.net
+ *
+ * Licensed under the MIT license:
+ * https://opensource.org/licenses/MIT
+ *
+ * Based on
+ * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message
+ * Digest Algorithm, as defined in RFC 1321.
+ * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009
+ * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet
+ * Distributed under the BSD License
+ * See http://pajhome.org.uk/crypt/md5 for more info.
+ */
+function md5(bytes) {
+ if (typeof bytes === 'string') {
+ const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
+
+ bytes = new Uint8Array(msg.length);
+
+ for (let i = 0; i < msg.length; ++i) {
+ bytes[i] = msg.charCodeAt(i);
+ }
+ }
+
+ return md5ToHexEncodedArray(wordsToMd5(bytesToWords(bytes), bytes.length * 8));
+}
+/*
+ * Convert an array of little-endian words to an array of bytes
+ */
+
+
+function md5ToHexEncodedArray(input) {
+ const output = [];
+ const length32 = input.length * 32;
+ const hexTab = '0123456789abcdef';
+
+ for (let i = 0; i < length32; i += 8) {
+ const x = input[i >> 5] >>> i % 32 & 0xff;
+ const hex = parseInt(hexTab.charAt(x >>> 4 & 0x0f) + hexTab.charAt(x & 0x0f), 16);
+ output.push(hex);
+ }
+
+ return output;
+}
+/**
+ * Calculate output length with padding and bit length
+ */
+
+
+function getOutputLength(inputLength8) {
+ return (inputLength8 + 64 >>> 9 << 4) + 14 + 1;
+}
+/*
+ * Calculate the MD5 of an array of little-endian words, and a bit length.
+ */
+
+
+function wordsToMd5(x, len) {
+ /* append padding */
+ x[len >> 5] |= 0x80 << len % 32;
+ x[getOutputLength(len) - 1] = len;
+ let a = 1732584193;
+ let b = -271733879;
+ let c = -1732584194;
+ let d = 271733878;
+
+ for (let i = 0; i < x.length; i += 16) {
+ const olda = a;
+ const oldb = b;
+ const oldc = c;
+ const oldd = d;
+ a = md5ff(a, b, c, d, x[i], 7, -680876936);
+ d = md5ff(d, a, b, c, x[i + 1], 12, -389564586);
+ c = md5ff(c, d, a, b, x[i + 2], 17, 606105819);
+ b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330);
+ a = md5ff(a, b, c, d, x[i + 4], 7, -176418897);
+ d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426);
+ c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341);
+ b = md5ff(b, c, d, a, x[i + 7], 22, -45705983);
+ a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416);
+ d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417);
+ c = md5ff(c, d, a, b, x[i + 10], 17, -42063);
+ b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162);
+ a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682);
+ d = md5ff(d, a, b, c, x[i + 13], 12, -40341101);
+ c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290);
+ b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329);
+ a = md5gg(a, b, c, d, x[i + 1], 5, -165796510);
+ d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632);
+ c = md5gg(c, d, a, b, x[i + 11], 14, 643717713);
+ b = md5gg(b, c, d, a, x[i], 20, -373897302);
+ a = md5gg(a, b, c, d, x[i + 5], 5, -701558691);
+ d = md5gg(d, a, b, c, x[i + 10], 9, 38016083);
+ c = md5gg(c, d, a, b, x[i + 15], 14, -660478335);
+ b = md5gg(b, c, d, a, x[i + 4], 20, -405537848);
+ a = md5gg(a, b, c, d, x[i + 9], 5, 568446438);
+ d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690);
+ c = md5gg(c, d, a, b, x[i + 3], 14, -187363961);
+ b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501);
+ a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467);
+ d = md5gg(d, a, b, c, x[i + 2], 9, -51403784);
+ c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473);
+ b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734);
+ a = md5hh(a, b, c, d, x[i + 5], 4, -378558);
+ d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463);
+ c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562);
+ b = md5hh(b, c, d, a, x[i + 14], 23, -35309556);
+ a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060);
+ d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353);
+ c = md5hh(c, d, a, b, x[i + 7], 16, -155497632);
+ b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640);
+ a = md5hh(a, b, c, d, x[i + 13], 4, 681279174);
+ d = md5hh(d, a, b, c, x[i], 11, -358537222);
+ c = md5hh(c, d, a, b, x[i + 3], 16, -722521979);
+ b = md5hh(b, c, d, a, x[i + 6], 23, 76029189);
+ a = md5hh(a, b, c, d, x[i + 9], 4, -640364487);
+ d = md5hh(d, a, b, c, x[i + 12], 11, -421815835);
+ c = md5hh(c, d, a, b, x[i + 15], 16, 530742520);
+ b = md5hh(b, c, d, a, x[i + 2], 23, -995338651);
+ a = md5ii(a, b, c, d, x[i], 6, -198630844);
+ d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415);
+ c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905);
+ b = md5ii(b, c, d, a, x[i + 5], 21, -57434055);
+ a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571);
+ d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606);
+ c = md5ii(c, d, a, b, x[i + 10], 15, -1051523);
+ b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799);
+ a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359);
+ d = md5ii(d, a, b, c, x[i + 15], 10, -30611744);
+ c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380);
+ b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649);
+ a = md5ii(a, b, c, d, x[i + 4], 6, -145523070);
+ d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379);
+ c = md5ii(c, d, a, b, x[i + 2], 15, 718787259);
+ b = md5ii(b, c, d, a, x[i + 9], 21, -343485551);
+ a = safeAdd(a, olda);
+ b = safeAdd(b, oldb);
+ c = safeAdd(c, oldc);
+ d = safeAdd(d, oldd);
+ }
+
+ return [a, b, c, d];
+}
+/*
+ * Convert an array of bytes to an array of little-endian words
+ * Characters >255 have their high-byte silently ignored.
+ */
+
+
+function bytesToWords(input) {
+ if (input.length === 0) {
+ return [];
+ }
+
+ const length8 = input.length * 8;
+ const output = new Uint32Array(getOutputLength(length8));
+
+ for (let i = 0; i < length8; i += 8) {
+ output[i >> 5] |= (input[i / 8] & 0xff) << i % 32;
+ }
+
+ return output;
+}
+/*
+ * Add integers, wrapping at 2^32. This uses 16-bit operations internally
+ * to work around bugs in some JS interpreters.
+ */
+
+
+function safeAdd(x, y) {
+ const lsw = (x & 0xffff) + (y & 0xffff);
+ const msw = (x >> 16) + (y >> 16) + (lsw >> 16);
+ return msw << 16 | lsw & 0xffff;
+}
+/*
+ * Bitwise rotate a 32-bit number to the left.
+ */
+
+
+function bitRotateLeft(num, cnt) {
+ return num << cnt | num >>> 32 - cnt;
+}
+/*
+ * These functions implement the four basic operations the algorithm uses.
+ */
+
+
+function md5cmn(q, a, b, x, s, t) {
+ return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b);
+}
+
+function md5ff(a, b, c, d, x, s, t) {
+ return md5cmn(b & c | ~b & d, a, b, x, s, t);
+}
+
+function md5gg(a, b, c, d, x, s, t) {
+ return md5cmn(b & d | c & ~d, a, b, x, s, t);
+}
+
+function md5hh(a, b, c, d, x, s, t) {
+ return md5cmn(b ^ c ^ d, a, b, x, s, t);
+}
+
+function md5ii(a, b, c, d, x, s, t) {
+ return md5cmn(c ^ (b | ~d), a, b, x, s, t);
+}
+
+var _default = md5;
+exports.default = _default; \ No newline at end of file
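
md5-browser.js is the dependency-free MD5 used by the browser-targeted v3; unlike the Node wrapper it returns a plain array of byte values rather than a Buffer. A quick sanity check against the RFC 1321 test-suite value for "abc" (900150983cd24fb0d6963f7d28e17f72) would look like this:

    const md5 = require('./md5-browser.js').default;

    const hex = md5('abc')
      .map(b => b.toString(16).padStart(2, '0'))
      .join('');
    console.log(hex === '900150983cd24fb0d6963f7d28e17f72'); // true
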
diff --git a/node_modules/uuid/dist/md5.js b/node_modules/uuid/dist/md5.js
new file mode 100644
index 0000000..824d481
--- /dev/null
+++ b/node_modules/uuid/dist/md5.js
@@ -0,0 +1,23 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _crypto = _interopRequireDefault(require("crypto"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function md5(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
+ }
+
+ return _crypto.default.createHash('md5').update(bytes).digest();
+}
+
+var _default = md5;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/nil.js b/node_modules/uuid/dist/nil.js
new file mode 100644
index 0000000..7ade577
--- /dev/null
+++ b/node_modules/uuid/dist/nil.js
@@ -0,0 +1,8 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+var _default = '00000000-0000-0000-0000-000000000000';
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/parse.js b/node_modules/uuid/dist/parse.js
new file mode 100644
index 0000000..4c69fc3
--- /dev/null
+++ b/node_modules/uuid/dist/parse.js
@@ -0,0 +1,45 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _validate = _interopRequireDefault(require("./validate.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function parse(uuid) {
+ if (!(0, _validate.default)(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ let v;
+ const arr = new Uint8Array(16); // Parse ########-....-....-....-............
+
+ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;
+ arr[1] = v >>> 16 & 0xff;
+ arr[2] = v >>> 8 & 0xff;
+ arr[3] = v & 0xff; // Parse ........-####-....-....-............
+
+ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;
+ arr[5] = v & 0xff; // Parse ........-....-####-....-............
+
+ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;
+ arr[7] = v & 0xff; // Parse ........-....-....-####-............
+
+ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;
+ arr[9] = v & 0xff; // Parse ........-....-....-....-############
+ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes)
+
+ arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;
+ arr[11] = v / 0x100000000 & 0xff;
+ arr[12] = v >>> 24 & 0xff;
+ arr[13] = v >>> 16 & 0xff;
+ arr[14] = v >>> 8 & 0xff;
+ arr[15] = v & 0xff;
+ return arr;
+}
+
+var _default = parse;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/regex.js b/node_modules/uuid/dist/regex.js
new file mode 100644
index 0000000..1ef91d6
--- /dev/null
+++ b/node_modules/uuid/dist/regex.js
@@ -0,0 +1,8 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/rng-browser.js b/node_modules/uuid/dist/rng-browser.js
new file mode 100644
index 0000000..91faeae
--- /dev/null
+++ b/node_modules/uuid/dist/rng-browser.js
@@ -0,0 +1,26 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = rng;
+// Unique ID creation requires a high quality random # generator. In the browser we therefore
+// require the crypto API and do not support built-in fallback to lower quality random number
+// generators (like Math.random()).
+let getRandomValues;
+const rnds8 = new Uint8Array(16);
+
+function rng() {
+ // lazy load so that environments that need to polyfill have a chance to do so
+ if (!getRandomValues) {
+    // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation.
+    // On IE11 the full implementation is only exposed via the prefixed msCrypto global.
+ getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);
+
+ if (!getRandomValues) {
+ throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');
+ }
+ }
+
+ return getRandomValues(rnds8);
+} \ No newline at end of file
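
The browser rng resolves crypto.getRandomValues lazily, falling back to IE11's prefixed msCrypto and throwing only when neither exists. Deferring the lookup to the first call means merely loading the module never throws, so a polyfill installed later by the host page is still picked up:

    const rng = require('./rng-browser.js').default;

    // Nothing has been checked at load time; the error (if any) surfaces here, on first use.
    const bytes = rng();
    console.log(bytes.length); // 16 — the same module-level Uint8Array, refilled on each call
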
diff --git a/node_modules/uuid/dist/rng.js b/node_modules/uuid/dist/rng.js
new file mode 100644
index 0000000..3507f93
--- /dev/null
+++ b/node_modules/uuid/dist/rng.js
@@ -0,0 +1,24 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = rng;
+
+var _crypto = _interopRequireDefault(require("crypto"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate
+
+let poolPtr = rnds8Pool.length;
+
+function rng() {
+ if (poolPtr > rnds8Pool.length - 16) {
+ _crypto.default.randomFillSync(rnds8Pool);
+
+ poolPtr = 0;
+ }
+
+ return rnds8Pool.slice(poolPtr, poolPtr += 16);
+} \ No newline at end of file
diff --git a/node_modules/uuid/dist/sha1-browser.js b/node_modules/uuid/dist/sha1-browser.js
new file mode 100644
index 0000000..24cbced
--- /dev/null
+++ b/node_modules/uuid/dist/sha1-browser.js
@@ -0,0 +1,104 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+// Adapted from Chris Veness' SHA1 code at
+// http://www.movable-type.co.uk/scripts/sha1.html
+function f(s, x, y, z) {
+ switch (s) {
+ case 0:
+ return x & y ^ ~x & z;
+
+ case 1:
+ return x ^ y ^ z;
+
+ case 2:
+ return x & y ^ x & z ^ y & z;
+
+ case 3:
+ return x ^ y ^ z;
+ }
+}
+
+function ROTL(x, n) {
+ return x << n | x >>> 32 - n;
+}
+
+function sha1(bytes) {
+ const K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6];
+ const H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0];
+
+ if (typeof bytes === 'string') {
+ const msg = unescape(encodeURIComponent(bytes)); // UTF8 escape
+
+ bytes = [];
+
+ for (let i = 0; i < msg.length; ++i) {
+ bytes.push(msg.charCodeAt(i));
+ }
+ } else if (!Array.isArray(bytes)) {
+ // Convert Array-like to Array
+ bytes = Array.prototype.slice.call(bytes);
+ }
+
+ bytes.push(0x80);
+ const l = bytes.length / 4 + 2;
+ const N = Math.ceil(l / 16);
+ const M = new Array(N);
+
+ for (let i = 0; i < N; ++i) {
+ const arr = new Uint32Array(16);
+
+ for (let j = 0; j < 16; ++j) {
+ arr[j] = bytes[i * 64 + j * 4] << 24 | bytes[i * 64 + j * 4 + 1] << 16 | bytes[i * 64 + j * 4 + 2] << 8 | bytes[i * 64 + j * 4 + 3];
+ }
+
+ M[i] = arr;
+ }
+
+ M[N - 1][14] = (bytes.length - 1) * 8 / Math.pow(2, 32);
+ M[N - 1][14] = Math.floor(M[N - 1][14]);
+ M[N - 1][15] = (bytes.length - 1) * 8 & 0xffffffff;
+
+ for (let i = 0; i < N; ++i) {
+ const W = new Uint32Array(80);
+
+ for (let t = 0; t < 16; ++t) {
+ W[t] = M[i][t];
+ }
+
+ for (let t = 16; t < 80; ++t) {
+ W[t] = ROTL(W[t - 3] ^ W[t - 8] ^ W[t - 14] ^ W[t - 16], 1);
+ }
+
+ let a = H[0];
+ let b = H[1];
+ let c = H[2];
+ let d = H[3];
+ let e = H[4];
+
+ for (let t = 0; t < 80; ++t) {
+ const s = Math.floor(t / 20);
+ const T = ROTL(a, 5) + f(s, b, c, d) + e + K[s] + W[t] >>> 0;
+ e = d;
+ d = c;
+ c = ROTL(b, 30) >>> 0;
+ b = a;
+ a = T;
+ }
+
+ H[0] = H[0] + a >>> 0;
+ H[1] = H[1] + b >>> 0;
+ H[2] = H[2] + c >>> 0;
+ H[3] = H[3] + d >>> 0;
+ H[4] = H[4] + e >>> 0;
+ }
+
+ return [H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff];
+}
+
+var _default = sha1;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/sha1.js b/node_modules/uuid/dist/sha1.js
new file mode 100644
index 0000000..03bdd63
--- /dev/null
+++ b/node_modules/uuid/dist/sha1.js
@@ -0,0 +1,23 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _crypto = _interopRequireDefault(require("crypto"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function sha1(bytes) {
+ if (Array.isArray(bytes)) {
+ bytes = Buffer.from(bytes);
+ } else if (typeof bytes === 'string') {
+ bytes = Buffer.from(bytes, 'utf8');
+ }
+
+ return _crypto.default.createHash('sha1').update(bytes).digest();
+}
+
+var _default = sha1;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/stringify.js b/node_modules/uuid/dist/stringify.js
new file mode 100644
index 0000000..b8e7519
--- /dev/null
+++ b/node_modules/uuid/dist/stringify.js
@@ -0,0 +1,39 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _validate = _interopRequireDefault(require("./validate.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+/**
+ * Convert array of 16 byte values to UUID string format of the form:
+ * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
+ */
+const byteToHex = [];
+
+for (let i = 0; i < 256; ++i) {
+ byteToHex.push((i + 0x100).toString(16).substr(1));
+}
+
+function stringify(arr, offset = 0) {
+ // Note: Be careful editing this code! It's been tuned for performance
+ // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434
+ const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one
+ // of the following:
+ // - One or more input array values don't map to a hex octet (leading to
+ // "undefined" in the uuid)
+ // - Invalid input values for the RFC `version` or `variant` fields
+
+ if (!(0, _validate.default)(uuid)) {
+ throw TypeError('Stringified UUID is invalid');
+ }
+
+ return uuid;
+}
+
+var _default = stringify;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuid.min.js b/node_modules/uuid/dist/umd/uuid.min.js
new file mode 100644
index 0000000..639ca2f
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuid.min.js
@@ -0,0 +1 @@
+!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((r="undefined"!=typeof globalThis?globalThis:r||self).uuid={})}(this,(function(r){"use strict";var e,n=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(n)}var o=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function a(r){return"string"==typeof r&&o.test(r)}for(var i,u,f=[],s=0;s<256;++s)f.push((s+256).toString(16).substr(1));function c(r){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(f[r[e+0]]+f[r[e+1]]+f[r[e+2]]+f[r[e+3]]+"-"+f[r[e+4]]+f[r[e+5]]+"-"+f[r[e+6]]+f[r[e+7]]+"-"+f[r[e+8]]+f[r[e+9]]+"-"+f[r[e+10]]+f[r[e+11]]+f[r[e+12]]+f[r[e+13]]+f[r[e+14]]+f[r[e+15]]).toLowerCase();if(!a(n))throw TypeError("Stringified UUID is invalid");return n}var l=0,d=0;function v(r){if(!a(r))throw TypeError("Invalid UUID");var e,n=new Uint8Array(16);return n[0]=(e=parseInt(r.slice(0,8),16))>>>24,n[1]=e>>>16&255,n[2]=e>>>8&255,n[3]=255&e,n[4]=(e=parseInt(r.slice(9,13),16))>>>8,n[5]=255&e,n[6]=(e=parseInt(r.slice(14,18),16))>>>8,n[7]=255&e,n[8]=(e=parseInt(r.slice(19,23),16))>>>8,n[9]=255&e,n[10]=(e=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=e/4294967296&255,n[12]=e>>>24&255,n[13]=e>>>16&255,n[14]=e>>>8&255,n[15]=255&e,n}function p(r,e,n){function t(r,t,o,a){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],n=0;n<r.length;++n)e.push(r.charCodeAt(n));return e}(r)),"string"==typeof t&&(t=v(t)),16!==t.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var i=new Uint8Array(16+r.length);if(i.set(t),i.set(r,t.length),(i=n(i))[6]=15&i[6]|e,i[8]=63&i[8]|128,o){a=a||0;for(var u=0;u<16;++u)o[a+u]=i[u];return o}return c(i)}try{t.name=r}catch(r){}return t.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",t.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",t}function h(r){return 14+(r+64>>>9<<4)+1}function y(r,e){var n=(65535&r)+(65535&e);return(r>>16)+(e>>16)+(n>>16)<<16|65535&n}function g(r,e,n,t,o,a){return y((i=y(y(e,r),y(t,a)))<<(u=o)|i>>>32-u,n);var i,u}function m(r,e,n,t,o,a,i){return g(e&n|~e&t,r,e,o,a,i)}function w(r,e,n,t,o,a,i){return g(e&t|n&~t,r,e,o,a,i)}function b(r,e,n,t,o,a,i){return g(e^n^t,r,e,o,a,i)}function A(r,e,n,t,o,a,i){return g(n^(e|~t),r,e,o,a,i)}var U=p("v3",48,(function(r){if("string"==typeof r){var e=unescape(encodeURIComponent(r));r=new Uint8Array(e.length);for(var n=0;n<e.length;++n)r[n]=e.charCodeAt(n)}return function(r){for(var e=[],n=32*r.length,t="0123456789abcdef",o=0;o<n;o+=8){var a=r[o>>5]>>>o%32&255,i=parseInt(t.charAt(a>>>4&15)+t.charAt(15&a),16);e.push(i)}return e}(function(r,e){r[e>>5]|=128<<e%32,r[h(e)-1]=e;for(var n=1732584193,t=-271733879,o=-1732584194,a=271733878,i=0;i<r.length;i+=16){var 
u=n,f=t,s=o,c=a;n=m(n,t,o,a,r[i],7,-680876936),a=m(a,n,t,o,r[i+1],12,-389564586),o=m(o,a,n,t,r[i+2],17,606105819),t=m(t,o,a,n,r[i+3],22,-1044525330),n=m(n,t,o,a,r[i+4],7,-176418897),a=m(a,n,t,o,r[i+5],12,1200080426),o=m(o,a,n,t,r[i+6],17,-1473231341),t=m(t,o,a,n,r[i+7],22,-45705983),n=m(n,t,o,a,r[i+8],7,1770035416),a=m(a,n,t,o,r[i+9],12,-1958414417),o=m(o,a,n,t,r[i+10],17,-42063),t=m(t,o,a,n,r[i+11],22,-1990404162),n=m(n,t,o,a,r[i+12],7,1804603682),a=m(a,n,t,o,r[i+13],12,-40341101),o=m(o,a,n,t,r[i+14],17,-1502002290),n=w(n,t=m(t,o,a,n,r[i+15],22,1236535329),o,a,r[i+1],5,-165796510),a=w(a,n,t,o,r[i+6],9,-1069501632),o=w(o,a,n,t,r[i+11],14,643717713),t=w(t,o,a,n,r[i],20,-373897302),n=w(n,t,o,a,r[i+5],5,-701558691),a=w(a,n,t,o,r[i+10],9,38016083),o=w(o,a,n,t,r[i+15],14,-660478335),t=w(t,o,a,n,r[i+4],20,-405537848),n=w(n,t,o,a,r[i+9],5,568446438),a=w(a,n,t,o,r[i+14],9,-1019803690),o=w(o,a,n,t,r[i+3],14,-187363961),t=w(t,o,a,n,r[i+8],20,1163531501),n=w(n,t,o,a,r[i+13],5,-1444681467),a=w(a,n,t,o,r[i+2],9,-51403784),o=w(o,a,n,t,r[i+7],14,1735328473),n=b(n,t=w(t,o,a,n,r[i+12],20,-1926607734),o,a,r[i+5],4,-378558),a=b(a,n,t,o,r[i+8],11,-2022574463),o=b(o,a,n,t,r[i+11],16,1839030562),t=b(t,o,a,n,r[i+14],23,-35309556),n=b(n,t,o,a,r[i+1],4,-1530992060),a=b(a,n,t,o,r[i+4],11,1272893353),o=b(o,a,n,t,r[i+7],16,-155497632),t=b(t,o,a,n,r[i+10],23,-1094730640),n=b(n,t,o,a,r[i+13],4,681279174),a=b(a,n,t,o,r[i],11,-358537222),o=b(o,a,n,t,r[i+3],16,-722521979),t=b(t,o,a,n,r[i+6],23,76029189),n=b(n,t,o,a,r[i+9],4,-640364487),a=b(a,n,t,o,r[i+12],11,-421815835),o=b(o,a,n,t,r[i+15],16,530742520),n=A(n,t=b(t,o,a,n,r[i+2],23,-995338651),o,a,r[i],6,-198630844),a=A(a,n,t,o,r[i+7],10,1126891415),o=A(o,a,n,t,r[i+14],15,-1416354905),t=A(t,o,a,n,r[i+5],21,-57434055),n=A(n,t,o,a,r[i+12],6,1700485571),a=A(a,n,t,o,r[i+3],10,-1894986606),o=A(o,a,n,t,r[i+10],15,-1051523),t=A(t,o,a,n,r[i+1],21,-2054922799),n=A(n,t,o,a,r[i+8],6,1873313359),a=A(a,n,t,o,r[i+15],10,-30611744),o=A(o,a,n,t,r[i+6],15,-1560198380),t=A(t,o,a,n,r[i+13],21,1309151649),n=A(n,t,o,a,r[i+4],6,-145523070),a=A(a,n,t,o,r[i+11],10,-1120210379),o=A(o,a,n,t,r[i+2],15,718787259),t=A(t,o,a,n,r[i+9],21,-343485551),n=y(n,u),t=y(t,f),o=y(o,s),a=y(a,c)}return[n,t,o,a]}(function(r){if(0===r.length)return[];for(var e=8*r.length,n=new Uint32Array(h(e)),t=0;t<e;t+=8)n[t>>5]|=(255&r[t/8])<<t%32;return n}(r),8*r.length))}));function I(r,e,n,t){switch(r){case 0:return e&n^~e&t;case 1:return e^n^t;case 2:return e&n^e&t^n&t;case 3:return e^n^t}}function C(r,e){return r<<e|r>>>32-e}var R=p("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],n=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var t=unescape(encodeURIComponent(r));r=[];for(var o=0;o<t.length;++o)r.push(t.charCodeAt(o))}else Array.isArray(r)||(r=Array.prototype.slice.call(r));r.push(128);for(var a=r.length/4+2,i=Math.ceil(a/16),u=new Array(i),f=0;f<i;++f){for(var s=new Uint32Array(16),c=0;c<16;++c)s[c]=r[64*f+4*c]<<24|r[64*f+4*c+1]<<16|r[64*f+4*c+2]<<8|r[64*f+4*c+3];u[f]=s}u[i-1][14]=8*(r.length-1)/Math.pow(2,32),u[i-1][14]=Math.floor(u[i-1][14]),u[i-1][15]=8*(r.length-1)&4294967295;for(var l=0;l<i;++l){for(var d=new Uint32Array(80),v=0;v<16;++v)d[v]=u[l][v];for(var p=16;p<80;++p)d[p]=C(d[p-3]^d[p-8]^d[p-14]^d[p-16],1);for(var h=n[0],y=n[1],g=n[2],m=n[3],w=n[4],b=0;b<80;++b){var 
A=Math.floor(b/20),U=C(h,5)+I(A,y,g,m)+w+e[A]+d[b]>>>0;w=m,m=g,g=C(y,30)>>>0,y=h,h=U}n[0]=n[0]+h>>>0,n[1]=n[1]+y>>>0,n[2]=n[2]+g>>>0,n[3]=n[3]+m>>>0,n[4]=n[4]+w>>>0}return[n[0]>>24&255,n[0]>>16&255,n[0]>>8&255,255&n[0],n[1]>>24&255,n[1]>>16&255,n[1]>>8&255,255&n[1],n[2]>>24&255,n[2]>>16&255,n[2]>>8&255,255&n[2],n[3]>>24&255,n[3]>>16&255,n[3]>>8&255,255&n[3],n[4]>>24&255,n[4]>>16&255,n[4]>>8&255,255&n[4]]}));r.NIL="00000000-0000-0000-0000-000000000000",r.parse=v,r.stringify=c,r.v1=function(r,e,n){var o=e&&n||0,a=e||new Array(16),f=(r=r||{}).node||i,s=void 0!==r.clockseq?r.clockseq:u;if(null==f||null==s){var v=r.random||(r.rng||t)();null==f&&(f=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==s&&(s=u=16383&(v[6]<<8|v[7]))}var p=void 0!==r.msecs?r.msecs:Date.now(),h=void 0!==r.nsecs?r.nsecs:d+1,y=p-l+(h-d)/1e4;if(y<0&&void 0===r.clockseq&&(s=s+1&16383),(y<0||p>l)&&void 0===r.nsecs&&(h=0),h>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");l=p,d=h,u=s;var g=(1e4*(268435455&(p+=122192928e5))+h)%4294967296;a[o++]=g>>>24&255,a[o++]=g>>>16&255,a[o++]=g>>>8&255,a[o++]=255&g;var m=p/4294967296*1e4&268435455;a[o++]=m>>>8&255,a[o++]=255&m,a[o++]=m>>>24&15|16,a[o++]=m>>>16&255,a[o++]=s>>>8|128,a[o++]=255&s;for(var w=0;w<6;++w)a[o+w]=f[w];return e||c(a)},r.v3=U,r.v4=function(r,e,n){var o=(r=r||{}).random||(r.rng||t)();if(o[6]=15&o[6]|64,o[8]=63&o[8]|128,e){n=n||0;for(var a=0;a<16;++a)e[n+a]=o[a];return e}return c(o)},r.v5=R,r.validate=a,r.version=function(r){if(!a(r))throw TypeError("Invalid UUID");return parseInt(r.substr(14,1),16)},Object.defineProperty(r,"__esModule",{value:!0})})); \ No newline at end of file
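
The UMD header at the start of the bundle probes for CommonJS exports, then AMD define, and otherwise attaches a uuid namespace to globalThis, so the same minified file serves a script tag, an AMD loader, or require(). A sketch of the require path (the relative path is illustrative; in a browser the API appears as the global uuid instead):

    const uuid = require('./uuid.min.js');

    // Needs an environment that exposes crypto.getRandomValues (modern browsers,
    // or recent Node versions with a global crypto object).
    const id = uuid.v4();
    console.log(uuid.validate(id), uuid.version(id)); // true 4
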
diff --git a/node_modules/uuid/dist/umd/uuidNIL.min.js b/node_modules/uuid/dist/umd/uuidNIL.min.js
new file mode 100644
index 0000000..30b28a7
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidNIL.min.js
@@ -0,0 +1 @@
+!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidNIL=n()}(this,(function(){"use strict";return"00000000-0000-0000-0000-000000000000"})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidParse.min.js b/node_modules/uuid/dist/umd/uuidParse.min.js
new file mode 100644
index 0000000..d48ea6a
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidParse.min.js
@@ -0,0 +1 @@
+!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?module.exports=n():"function"==typeof define&&define.amd?define(n):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidParse=n()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(n){if(!function(n){return"string"==typeof n&&e.test(n)}(n))throw TypeError("Invalid UUID");var t,i=new Uint8Array(16);return i[0]=(t=parseInt(n.slice(0,8),16))>>>24,i[1]=t>>>16&255,i[2]=t>>>8&255,i[3]=255&t,i[4]=(t=parseInt(n.slice(9,13),16))>>>8,i[5]=255&t,i[6]=(t=parseInt(n.slice(14,18),16))>>>8,i[7]=255&t,i[8]=(t=parseInt(n.slice(19,23),16))>>>8,i[9]=255&t,i[10]=(t=parseInt(n.slice(24,36),16))/1099511627776&255,i[11]=t/4294967296&255,i[12]=t>>>24&255,i[13]=t>>>16&255,i[14]=t>>>8&255,i[15]=255&t,i}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidStringify.min.js b/node_modules/uuid/dist/umd/uuidStringify.min.js
new file mode 100644
index 0000000..fd39adc
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidStringify.min.js
@@ -0,0 +1 @@
+!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidStringify=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function t(t){return"string"==typeof t&&e.test(t)}for(var i=[],n=0;n<256;++n)i.push((n+256).toString(16).substr(1));return function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,f=(i[e[n+0]]+i[e[n+1]]+i[e[n+2]]+i[e[n+3]]+"-"+i[e[n+4]]+i[e[n+5]]+"-"+i[e[n+6]]+i[e[n+7]]+"-"+i[e[n+8]]+i[e[n+9]]+"-"+i[e[n+10]]+i[e[n+11]]+i[e[n+12]]+i[e[n+13]]+i[e[n+14]]+i[e[n+15]]).toLowerCase();if(!t(f))throw TypeError("Stringified UUID is invalid");return f}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidValidate.min.js b/node_modules/uuid/dist/umd/uuidValidate.min.js
new file mode 100644
index 0000000..378e5b9
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidValidate.min.js
@@ -0,0 +1 @@
+!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidValidate=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){return"string"==typeof t&&e.test(t)}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidVersion.min.js b/node_modules/uuid/dist/umd/uuidVersion.min.js
new file mode 100644
index 0000000..274bb09
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidVersion.min.js
@@ -0,0 +1 @@
+!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?module.exports=t():"function"==typeof define&&define.amd?define(t):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidVersion=t()}(this,(function(){"use strict";var e=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;return function(t){if(!function(t){return"string"==typeof t&&e.test(t)}(t))throw TypeError("Invalid UUID");return parseInt(t.substr(14,1),16)}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidv1.min.js b/node_modules/uuid/dist/umd/uuidv1.min.js
new file mode 100644
index 0000000..2622889
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidv1.min.js
@@ -0,0 +1 @@
+!function(e,o){"object"==typeof exports&&"undefined"!=typeof module?module.exports=o():"function"==typeof define&&define.amd?define(o):(e="undefined"!=typeof globalThis?globalThis:e||self).uuidv1=o()}(this,(function(){"use strict";var e,o=new Uint8Array(16);function t(){if(!e&&!(e="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return e(o)}var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(e){return"string"==typeof e&&n.test(e)}for(var i,u,s=[],a=0;a<256;++a)s.push((a+256).toString(16).substr(1));var d=0,f=0;return function(e,o,n){var a=o&&n||0,c=o||new Array(16),l=(e=e||{}).node||i,p=void 0!==e.clockseq?e.clockseq:u;if(null==l||null==p){var v=e.random||(e.rng||t)();null==l&&(l=i=[1|v[0],v[1],v[2],v[3],v[4],v[5]]),null==p&&(p=u=16383&(v[6]<<8|v[7]))}var y=void 0!==e.msecs?e.msecs:Date.now(),m=void 0!==e.nsecs?e.nsecs:f+1,g=y-d+(m-f)/1e4;if(g<0&&void 0===e.clockseq&&(p=p+1&16383),(g<0||y>d)&&void 0===e.nsecs&&(m=0),m>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");d=y,f=m,u=p;var h=(1e4*(268435455&(y+=122192928e5))+m)%4294967296;c[a++]=h>>>24&255,c[a++]=h>>>16&255,c[a++]=h>>>8&255,c[a++]=255&h;var w=y/4294967296*1e4&268435455;c[a++]=w>>>8&255,c[a++]=255&w,c[a++]=w>>>24&15|16,c[a++]=w>>>16&255,c[a++]=p>>>8|128,c[a++]=255&p;for(var b=0;b<6;++b)c[a+b]=l[b];return o||function(e){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,t=(s[e[o+0]]+s[e[o+1]]+s[e[o+2]]+s[e[o+3]]+"-"+s[e[o+4]]+s[e[o+5]]+"-"+s[e[o+6]]+s[e[o+7]]+"-"+s[e[o+8]]+s[e[o+9]]+"-"+s[e[o+10]]+s[e[o+11]]+s[e[o+12]]+s[e[o+13]]+s[e[o+14]]+s[e[o+15]]).toLowerCase();if(!r(t))throw TypeError("Stringified UUID is invalid");return t}(c)}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidv3.min.js b/node_modules/uuid/dist/umd/uuidv3.min.js
new file mode 100644
index 0000000..8d37b62
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidv3.min.js
@@ -0,0 +1 @@
+!function(n,r){"object"==typeof exports&&"undefined"!=typeof module?module.exports=r():"function"==typeof define&&define.amd?define(r):(n="undefined"!=typeof globalThis?globalThis:n||self).uuidv3=r()}(this,(function(){"use strict";var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(r){return"string"==typeof r&&n.test(r)}for(var e=[],t=0;t<256;++t)e.push((t+256).toString(16).substr(1));function i(n){return 14+(n+64>>>9<<4)+1}function o(n,r){var e=(65535&n)+(65535&r);return(n>>16)+(r>>16)+(e>>16)<<16|65535&e}function a(n,r,e,t,i,a){return o((f=o(o(r,n),o(t,a)))<<(u=i)|f>>>32-u,e);var f,u}function f(n,r,e,t,i,o,f){return a(r&e|~r&t,n,r,i,o,f)}function u(n,r,e,t,i,o,f){return a(r&t|e&~t,n,r,i,o,f)}function c(n,r,e,t,i,o,f){return a(r^e^t,n,r,i,o,f)}function s(n,r,e,t,i,o,f){return a(e^(r|~t),n,r,i,o,f)}return function(n,t,i){function o(n,o,a,f){if("string"==typeof n&&(n=function(n){n=unescape(encodeURIComponent(n));for(var r=[],e=0;e<n.length;++e)r.push(n.charCodeAt(e));return r}(n)),"string"==typeof o&&(o=function(n){if(!r(n))throw TypeError("Invalid UUID");var e,t=new Uint8Array(16);return t[0]=(e=parseInt(n.slice(0,8),16))>>>24,t[1]=e>>>16&255,t[2]=e>>>8&255,t[3]=255&e,t[4]=(e=parseInt(n.slice(9,13),16))>>>8,t[5]=255&e,t[6]=(e=parseInt(n.slice(14,18),16))>>>8,t[7]=255&e,t[8]=(e=parseInt(n.slice(19,23),16))>>>8,t[9]=255&e,t[10]=(e=parseInt(n.slice(24,36),16))/1099511627776&255,t[11]=e/4294967296&255,t[12]=e>>>24&255,t[13]=e>>>16&255,t[14]=e>>>8&255,t[15]=255&e,t}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var u=new Uint8Array(16+n.length);if(u.set(o),u.set(n,o.length),(u=i(u))[6]=15&u[6]|t,u[8]=63&u[8]|128,a){f=f||0;for(var c=0;c<16;++c)a[f+c]=u[c];return a}return function(n){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,i=(e[n[t+0]]+e[n[t+1]]+e[n[t+2]]+e[n[t+3]]+"-"+e[n[t+4]]+e[n[t+5]]+"-"+e[n[t+6]]+e[n[t+7]]+"-"+e[n[t+8]]+e[n[t+9]]+"-"+e[n[t+10]]+e[n[t+11]]+e[n[t+12]]+e[n[t+13]]+e[n[t+14]]+e[n[t+15]]).toLowerCase();if(!r(i))throw TypeError("Stringified UUID is invalid");return i}(u)}try{o.name=n}catch(n){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v3",48,(function(n){if("string"==typeof n){var r=unescape(encodeURIComponent(n));n=new Uint8Array(r.length);for(var e=0;e<r.length;++e)n[e]=r.charCodeAt(e)}return function(n){for(var r=[],e=32*n.length,t="0123456789abcdef",i=0;i<e;i+=8){var o=n[i>>5]>>>i%32&255,a=parseInt(t.charAt(o>>>4&15)+t.charAt(15&o),16);r.push(a)}return r}(function(n,r){n[r>>5]|=128<<r%32,n[i(r)-1]=r;for(var e=1732584193,t=-271733879,a=-1732584194,l=271733878,d=0;d<n.length;d+=16){var 
p=e,h=t,v=a,g=l;e=f(e,t,a,l,n[d],7,-680876936),l=f(l,e,t,a,n[d+1],12,-389564586),a=f(a,l,e,t,n[d+2],17,606105819),t=f(t,a,l,e,n[d+3],22,-1044525330),e=f(e,t,a,l,n[d+4],7,-176418897),l=f(l,e,t,a,n[d+5],12,1200080426),a=f(a,l,e,t,n[d+6],17,-1473231341),t=f(t,a,l,e,n[d+7],22,-45705983),e=f(e,t,a,l,n[d+8],7,1770035416),l=f(l,e,t,a,n[d+9],12,-1958414417),a=f(a,l,e,t,n[d+10],17,-42063),t=f(t,a,l,e,n[d+11],22,-1990404162),e=f(e,t,a,l,n[d+12],7,1804603682),l=f(l,e,t,a,n[d+13],12,-40341101),a=f(a,l,e,t,n[d+14],17,-1502002290),e=u(e,t=f(t,a,l,e,n[d+15],22,1236535329),a,l,n[d+1],5,-165796510),l=u(l,e,t,a,n[d+6],9,-1069501632),a=u(a,l,e,t,n[d+11],14,643717713),t=u(t,a,l,e,n[d],20,-373897302),e=u(e,t,a,l,n[d+5],5,-701558691),l=u(l,e,t,a,n[d+10],9,38016083),a=u(a,l,e,t,n[d+15],14,-660478335),t=u(t,a,l,e,n[d+4],20,-405537848),e=u(e,t,a,l,n[d+9],5,568446438),l=u(l,e,t,a,n[d+14],9,-1019803690),a=u(a,l,e,t,n[d+3],14,-187363961),t=u(t,a,l,e,n[d+8],20,1163531501),e=u(e,t,a,l,n[d+13],5,-1444681467),l=u(l,e,t,a,n[d+2],9,-51403784),a=u(a,l,e,t,n[d+7],14,1735328473),e=c(e,t=u(t,a,l,e,n[d+12],20,-1926607734),a,l,n[d+5],4,-378558),l=c(l,e,t,a,n[d+8],11,-2022574463),a=c(a,l,e,t,n[d+11],16,1839030562),t=c(t,a,l,e,n[d+14],23,-35309556),e=c(e,t,a,l,n[d+1],4,-1530992060),l=c(l,e,t,a,n[d+4],11,1272893353),a=c(a,l,e,t,n[d+7],16,-155497632),t=c(t,a,l,e,n[d+10],23,-1094730640),e=c(e,t,a,l,n[d+13],4,681279174),l=c(l,e,t,a,n[d],11,-358537222),a=c(a,l,e,t,n[d+3],16,-722521979),t=c(t,a,l,e,n[d+6],23,76029189),e=c(e,t,a,l,n[d+9],4,-640364487),l=c(l,e,t,a,n[d+12],11,-421815835),a=c(a,l,e,t,n[d+15],16,530742520),e=s(e,t=c(t,a,l,e,n[d+2],23,-995338651),a,l,n[d],6,-198630844),l=s(l,e,t,a,n[d+7],10,1126891415),a=s(a,l,e,t,n[d+14],15,-1416354905),t=s(t,a,l,e,n[d+5],21,-57434055),e=s(e,t,a,l,n[d+12],6,1700485571),l=s(l,e,t,a,n[d+3],10,-1894986606),a=s(a,l,e,t,n[d+10],15,-1051523),t=s(t,a,l,e,n[d+1],21,-2054922799),e=s(e,t,a,l,n[d+8],6,1873313359),l=s(l,e,t,a,n[d+15],10,-30611744),a=s(a,l,e,t,n[d+6],15,-1560198380),t=s(t,a,l,e,n[d+13],21,1309151649),e=s(e,t,a,l,n[d+4],6,-145523070),l=s(l,e,t,a,n[d+11],10,-1120210379),a=s(a,l,e,t,n[d+2],15,718787259),t=s(t,a,l,e,n[d+9],21,-343485551),e=o(e,p),t=o(t,h),a=o(a,v),l=o(l,g)}return[e,t,a,l]}(function(n){if(0===n.length)return[];for(var r=8*n.length,e=new Uint32Array(i(r)),t=0;t<r;t+=8)e[t>>5]|=(255&n[t/8])<<t%32;return e}(n),8*n.length))}))})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidv4.min.js b/node_modules/uuid/dist/umd/uuidv4.min.js
new file mode 100644
index 0000000..e9df84b
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidv4.min.js
@@ -0,0 +1 @@
+!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).uuidv4=e()}(this,(function(){"use strict";var t,e=new Uint8Array(16);function o(){if(!t&&!(t="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return t(e)}var n=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function r(t){return"string"==typeof t&&n.test(t)}for(var i=[],u=0;u<256;++u)i.push((u+256).toString(16).substr(1));return function(t,e,n){var u=(t=t||{}).random||(t.rng||o)();if(u[6]=15&u[6]|64,u[8]=63&u[8]|128,e){n=n||0;for(var f=0;f<16;++f)e[n+f]=u[f];return e}return function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,o=(i[t[e+0]]+i[t[e+1]]+i[t[e+2]]+i[t[e+3]]+"-"+i[t[e+4]]+i[t[e+5]]+"-"+i[t[e+6]]+i[t[e+7]]+"-"+i[t[e+8]]+i[t[e+9]]+"-"+i[t[e+10]]+i[t[e+11]]+i[t[e+12]]+i[t[e+13]]+i[t[e+14]]+i[t[e+15]]).toLowerCase();if(!r(o))throw TypeError("Stringified UUID is invalid");return o}(u)}})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/umd/uuidv5.min.js b/node_modules/uuid/dist/umd/uuidv5.min.js
new file mode 100644
index 0000000..ba6fc63
--- /dev/null
+++ b/node_modules/uuid/dist/umd/uuidv5.min.js
@@ -0,0 +1 @@
+!function(r,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(r="undefined"!=typeof globalThis?globalThis:r||self).uuidv5=e()}(this,(function(){"use strict";var r=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function e(e){return"string"==typeof e&&r.test(e)}for(var t=[],n=0;n<256;++n)t.push((n+256).toString(16).substr(1));function a(r,e,t,n){switch(r){case 0:return e&t^~e&n;case 1:return e^t^n;case 2:return e&t^e&n^t&n;case 3:return e^t^n}}function o(r,e){return r<<e|r>>>32-e}return function(r,n,a){function o(r,o,i,f){if("string"==typeof r&&(r=function(r){r=unescape(encodeURIComponent(r));for(var e=[],t=0;t<r.length;++t)e.push(r.charCodeAt(t));return e}(r)),"string"==typeof o&&(o=function(r){if(!e(r))throw TypeError("Invalid UUID");var t,n=new Uint8Array(16);return n[0]=(t=parseInt(r.slice(0,8),16))>>>24,n[1]=t>>>16&255,n[2]=t>>>8&255,n[3]=255&t,n[4]=(t=parseInt(r.slice(9,13),16))>>>8,n[5]=255&t,n[6]=(t=parseInt(r.slice(14,18),16))>>>8,n[7]=255&t,n[8]=(t=parseInt(r.slice(19,23),16))>>>8,n[9]=255&t,n[10]=(t=parseInt(r.slice(24,36),16))/1099511627776&255,n[11]=t/4294967296&255,n[12]=t>>>24&255,n[13]=t>>>16&255,n[14]=t>>>8&255,n[15]=255&t,n}(o)),16!==o.length)throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)");var s=new Uint8Array(16+r.length);if(s.set(o),s.set(r,o.length),(s=a(s))[6]=15&s[6]|n,s[8]=63&s[8]|128,i){f=f||0;for(var u=0;u<16;++u)i[f+u]=s[u];return i}return function(r){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,a=(t[r[n+0]]+t[r[n+1]]+t[r[n+2]]+t[r[n+3]]+"-"+t[r[n+4]]+t[r[n+5]]+"-"+t[r[n+6]]+t[r[n+7]]+"-"+t[r[n+8]]+t[r[n+9]]+"-"+t[r[n+10]]+t[r[n+11]]+t[r[n+12]]+t[r[n+13]]+t[r[n+14]]+t[r[n+15]]).toLowerCase();if(!e(a))throw TypeError("Stringified UUID is invalid");return a}(s)}try{o.name=r}catch(r){}return o.DNS="6ba7b810-9dad-11d1-80b4-00c04fd430c8",o.URL="6ba7b811-9dad-11d1-80b4-00c04fd430c8",o}("v5",80,(function(r){var e=[1518500249,1859775393,2400959708,3395469782],t=[1732584193,4023233417,2562383102,271733878,3285377520];if("string"==typeof r){var n=unescape(encodeURIComponent(r));r=[];for(var i=0;i<n.length;++i)r.push(n.charCodeAt(i))}else Array.isArray(r)||(r=Array.prototype.slice.call(r));r.push(128);for(var f=r.length/4+2,s=Math.ceil(f/16),u=new Array(s),c=0;c<s;++c){for(var l=new Uint32Array(16),p=0;p<16;++p)l[p]=r[64*c+4*p]<<24|r[64*c+4*p+1]<<16|r[64*c+4*p+2]<<8|r[64*c+4*p+3];u[c]=l}u[s-1][14]=8*(r.length-1)/Math.pow(2,32),u[s-1][14]=Math.floor(u[s-1][14]),u[s-1][15]=8*(r.length-1)&4294967295;for(var d=0;d<s;++d){for(var h=new Uint32Array(80),v=0;v<16;++v)h[v]=u[d][v];for(var y=16;y<80;++y)h[y]=o(h[y-3]^h[y-8]^h[y-14]^h[y-16],1);for(var g=t[0],b=t[1],w=t[2],U=t[3],A=t[4],I=0;I<80;++I){var m=Math.floor(I/20),C=o(g,5)+a(m,b,w,U)+A+e[m]+h[I]>>>0;A=U,U=w,w=o(b,30)>>>0,b=g,g=C}t[0]=t[0]+g>>>0,t[1]=t[1]+b>>>0,t[2]=t[2]+w>>>0,t[3]=t[3]+U>>>0,t[4]=t[4]+A>>>0}return[t[0]>>24&255,t[0]>>16&255,t[0]>>8&255,255&t[0],t[1]>>24&255,t[1]>>16&255,t[1]>>8&255,255&t[1],t[2]>>24&255,t[2]>>16&255,t[2]>>8&255,255&t[2],t[3]>>24&255,t[3]>>16&255,t[3]>>8&255,255&t[3],t[4]>>24&255,t[4]>>16&255,t[4]>>8&255,255&t[4]]}))})); \ No newline at end of file
diff --git a/node_modules/uuid/dist/uuid-bin.js b/node_modules/uuid/dist/uuid-bin.js
new file mode 100644
index 0000000..50a7a9f
--- /dev/null
+++ b/node_modules/uuid/dist/uuid-bin.js
@@ -0,0 +1,85 @@
+"use strict";
+
+var _assert = _interopRequireDefault(require("assert"));
+
+var _v = _interopRequireDefault(require("./v1.js"));
+
+var _v2 = _interopRequireDefault(require("./v3.js"));
+
+var _v3 = _interopRequireDefault(require("./v4.js"));
+
+var _v4 = _interopRequireDefault(require("./v5.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function usage() {
+ console.log('Usage:');
+ console.log(' uuid');
+ console.log(' uuid v1');
+ console.log(' uuid v3 <name> <namespace uuid>');
+ console.log(' uuid v4');
+ console.log(' uuid v5 <name> <namespace uuid>');
+ console.log(' uuid --help');
+ console.log('\nNote: <namespace uuid> may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122');
+}
+
+const args = process.argv.slice(2);
+
+if (args.indexOf('--help') >= 0) {
+ usage();
+ process.exit(0);
+}
+
+const version = args.shift() || 'v4';
+
+switch (version) {
+ case 'v1':
+ console.log((0, _v.default)());
+ break;
+
+ case 'v3':
+ {
+ const name = args.shift();
+ let namespace = args.shift();
+ (0, _assert.default)(name != null, 'v3 name not specified');
+ (0, _assert.default)(namespace != null, 'v3 namespace not specified');
+
+ if (namespace === 'URL') {
+ namespace = _v2.default.URL;
+ }
+
+ if (namespace === 'DNS') {
+ namespace = _v2.default.DNS;
+ }
+
+ console.log((0, _v2.default)(name, namespace));
+ break;
+ }
+
+ case 'v4':
+ console.log((0, _v3.default)());
+ break;
+
+ case 'v5':
+ {
+ const name = args.shift();
+ let namespace = args.shift();
+ (0, _assert.default)(name != null, 'v5 name not specified');
+ (0, _assert.default)(namespace != null, 'v5 namespace not specified');
+
+ if (namespace === 'URL') {
+ namespace = _v4.default.URL;
+ }
+
+ if (namespace === 'DNS') {
+ namespace = _v4.default.DNS;
+ }
+
+ console.log((0, _v4.default)(name, namespace));
+ break;
+ }
+
+ default:
+ usage();
+ process.exit(1);
+} \ No newline at end of file
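The CLI above is a thin dispatcher over the generator modules it requires. As a minimal sketch (assuming the package is consumed by its published name `uuid`), the equivalent programmatic calls are:

```js
// Hypothetical usage sketch: programmatic equivalents of the CLI commands above.
const { v1, v3, v4, v5 } = require('uuid');

console.log(v4());                               // `uuid` / `uuid v4` -> random UUID
console.log(v1());                               // `uuid v1`          -> time-based UUID
console.log(v3('hello', v3.DNS));                // `uuid v3 hello DNS`
console.log(v5('https://example.com', v5.URL));  // `uuid v5 <name> URL`
```

The `DNS` and `URL` namespace shortcuts the CLI accepts map to the RFC 4122 namespace UUIDs attached to the v3/v5 functions.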
diff --git a/node_modules/uuid/dist/v1.js b/node_modules/uuid/dist/v1.js
new file mode 100644
index 0000000..abb9b3d
--- /dev/null
+++ b/node_modules/uuid/dist/v1.js
@@ -0,0 +1,107 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _rng = _interopRequireDefault(require("./rng.js"));
+
+var _stringify = _interopRequireDefault(require("./stringify.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+// **`v1()` - Generate time-based UUID**
+//
+// Inspired by https://github.com/LiosK/UUID.js
+// and http://docs.python.org/library/uuid.html
+let _nodeId;
+
+let _clockseq; // Previous uuid creation time
+
+
+let _lastMSecs = 0;
+let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details
+
+function v1(options, buf, offset) {
+ let i = buf && offset || 0;
+ const b = buf || new Array(16);
+ options = options || {};
+ let node = options.node || _nodeId;
+ let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not
+ // specified. We do this lazily to minimize issues related to insufficient
+ // system entropy. See #189
+
+ if (node == null || clockseq == null) {
+ const seedBytes = options.random || (options.rng || _rng.default)();
+
+ if (node == null) {
+ // Per 4.5, create a 48-bit node id (47 random bits + multicast bit = 1)

+ node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];
+ }
+
+ if (clockseq == null) {
+ // Per 4.2.2, randomize (14 bit) clockseq
+ clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
+ }
+ } // UUID timestamps are 100 nano-second units since the Gregorian epoch,
+ // (1582-10-15 00:00). JS numbers aren't precise enough for this, so
+ // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
+ // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
+
+
+ let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock
+ // cycle to simulate higher resolution clock
+
+ let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)
+
+ const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression
+
+ if (dt < 0 && options.clockseq === undefined) {
+ clockseq = clockseq + 1 & 0x3fff;
+ } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
+ // time interval
+
+
+ if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
+ nsecs = 0;
+ } // Per 4.2.1.2 Throw error if too many uuids are requested
+
+
+ if (nsecs >= 10000) {
+ throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");
+ }
+
+ _lastMSecs = msecs;
+ _lastNSecs = nsecs;
+ _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch
+
+ msecs += 12219292800000; // `time_low`
+
+ const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
+ b[i++] = tl >>> 24 & 0xff;
+ b[i++] = tl >>> 16 & 0xff;
+ b[i++] = tl >>> 8 & 0xff;
+ b[i++] = tl & 0xff; // `time_mid`
+
+ const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;
+ b[i++] = tmh >>> 8 & 0xff;
+ b[i++] = tmh & 0xff; // `time_high_and_version`
+
+ b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
+
+ b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
+
+ b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`
+
+ b[i++] = clockseq & 0xff; // `node`
+
+ for (let n = 0; n < 6; ++n) {
+ b[i + n] = node[n];
+ }
+
+ return buf || (0, _stringify.default)(b);
+}
+
+var _default = v1;
+exports.default = _default; \ No newline at end of file
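The options object handled by `v1()` above controls every input to the timestamp layout (node id, clock sequence, milliseconds, and the 100-ns counter). A short sketch with made-up values, assuming the package is required as `uuid`:

```js
// Hypothetical sketch of the options accepted by v1(); all values are examples.
const { v1 } = require('uuid');

// Pinning msecs, nsecs, clockseq and node makes the output fully deterministic.
const id = v1({
  msecs: new Date('2022-04-09T18:38:29Z').getTime(), // integer milliseconds
  nsecs: 5678,                                       // 100-ns offset within that millisecond
  clockseq: 0x1234,                                  // 14-bit clock sequence
  node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab]         // 48-bit node id
});
console.log(id);

// Alternatively, write the 16 raw bytes into a caller-provided buffer.
const buf = new Uint8Array(16);
v1(null, buf, 0); // returns buf instead of a string
```

Omitting `node` or `clockseq` triggers the lazy random seeding shown in the source, so repeated calls then share one random node id per process.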
diff --git a/node_modules/uuid/dist/v3.js b/node_modules/uuid/dist/v3.js
new file mode 100644
index 0000000..6b47ff5
--- /dev/null
+++ b/node_modules/uuid/dist/v3.js
@@ -0,0 +1,16 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _v = _interopRequireDefault(require("./v35.js"));
+
+var _md = _interopRequireDefault(require("./md5.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const v3 = (0, _v.default)('v3', 0x30, _md.default);
+var _default = v3;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/v35.js b/node_modules/uuid/dist/v35.js
new file mode 100644
index 0000000..f784c63
--- /dev/null
+++ b/node_modules/uuid/dist/v35.js
@@ -0,0 +1,78 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = _default;
+exports.URL = exports.DNS = void 0;
+
+var _stringify = _interopRequireDefault(require("./stringify.js"));
+
+var _parse = _interopRequireDefault(require("./parse.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function stringToBytes(str) {
+ str = unescape(encodeURIComponent(str)); // UTF8 escape
+
+ const bytes = [];
+
+ for (let i = 0; i < str.length; ++i) {
+ bytes.push(str.charCodeAt(i));
+ }
+
+ return bytes;
+}
+
+const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';
+exports.DNS = DNS;
+const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';
+exports.URL = URL;
+
+function _default(name, version, hashfunc) {
+ function generateUUID(value, namespace, buf, offset) {
+ if (typeof value === 'string') {
+ value = stringToBytes(value);
+ }
+
+ if (typeof namespace === 'string') {
+ namespace = (0, _parse.default)(namespace);
+ }
+
+ if (namespace.length !== 16) {
+ throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');
+ } // Compute hash of namespace and value, Per 4.3
+ // Future: Use spread syntax when supported on all platforms, e.g. `bytes =
+ // hashfunc([...namespace, ... value])`
+
+
+ let bytes = new Uint8Array(16 + value.length);
+ bytes.set(namespace);
+ bytes.set(value, namespace.length);
+ bytes = hashfunc(bytes);
+ bytes[6] = bytes[6] & 0x0f | version;
+ bytes[8] = bytes[8] & 0x3f | 0x80;
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (let i = 0; i < 16; ++i) {
+ buf[offset + i] = bytes[i];
+ }
+
+ return buf;
+ }
+
+ return (0, _stringify.default)(bytes);
+ } // Function#name is not settable on some platforms (#270)
+
+
+ try {
+ generateUUID.name = name; // eslint-disable-next-line no-empty
+ } catch (err) {} // For CommonJS default export support
+
+
+ generateUUID.DNS = DNS;
+ generateUUID.URL = URL;
+ return generateUUID;
+} \ No newline at end of file
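`v35.js` is the shared factory behind v3 and v5: both wrap `_default(name, version, hashfunc)`, differing only in the hash function (MD5 vs SHA-1) and the version bits (0x30 vs 0x50). A brief sketch of the resulting name-based API, assuming the package entry point:

```js
// Sketch: name-based UUIDs built by the factory above are deterministic.
const { v3, v5 } = require('uuid');

// Same name + same namespace => same UUID, every time.
const a = v5('example.com', v5.DNS);
const b = v5('example.com', v5.DNS);
console.log(a === b); // true

// A custom namespace can be any valid UUID string (arbitrary example below).
const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341';
console.log(v3('some-name', MY_NAMESPACE));
```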
diff --git a/node_modules/uuid/dist/v4.js b/node_modules/uuid/dist/v4.js
new file mode 100644
index 0000000..838ce0b
--- /dev/null
+++ b/node_modules/uuid/dist/v4.js
@@ -0,0 +1,37 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _rng = _interopRequireDefault(require("./rng.js"));
+
+var _stringify = _interopRequireDefault(require("./stringify.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function v4(options, buf, offset) {
+ options = options || {};
+
+ const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
+
+
+ rnds[6] = rnds[6] & 0x0f | 0x40;
+ rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided
+
+ if (buf) {
+ offset = offset || 0;
+
+ for (let i = 0; i < 16; ++i) {
+ buf[offset + i] = rnds[i];
+ }
+
+ return buf;
+ }
+
+ return (0, _stringify.default)(rnds);
+}
+
+var _default = v4;
+exports.default = _default; \ No newline at end of file
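`v4()` above takes its 16 random bytes from `options.random`, `options.rng`, or the default RNG, and can either return a string or fill a caller-supplied buffer. A minimal sketch, assuming Node's built-in `crypto` as the byte source:

```js
// Sketch of v4() call shapes, based on the implementation above.
const { v4 } = require('uuid');
const crypto = require('crypto');

console.log(v4()); // e.g. '109156be-c4fb-41ea-b1b4-efe1671c5836' (random)

// Supply the 16 random bytes yourself, or a custom rng function.
console.log(v4({ random: crypto.randomBytes(16) }));
console.log(v4({ rng: () => crypto.randomBytes(16) }));

// Or write the raw bytes into an existing buffer (offset defaults to 0).
const buf = Buffer.alloc(16);
v4(null, buf);
```

Note that the version and variant bits are written into whatever byte array is passed, so `options.random` buffers are modified in place.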
diff --git a/node_modules/uuid/dist/v5.js b/node_modules/uuid/dist/v5.js
new file mode 100644
index 0000000..99d615e
--- /dev/null
+++ b/node_modules/uuid/dist/v5.js
@@ -0,0 +1,16 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _v = _interopRequireDefault(require("./v35.js"));
+
+var _sha = _interopRequireDefault(require("./sha1.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+const v5 = (0, _v.default)('v5', 0x50, _sha.default);
+var _default = v5;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/validate.js b/node_modules/uuid/dist/validate.js
new file mode 100644
index 0000000..fd05215
--- /dev/null
+++ b/node_modules/uuid/dist/validate.js
@@ -0,0 +1,17 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _regex = _interopRequireDefault(require("./regex.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function validate(uuid) {
+ return typeof uuid === 'string' && _regex.default.test(uuid);
+}
+
+var _default = validate;
+exports.default = _default; \ No newline at end of file
diff --git a/node_modules/uuid/dist/version.js b/node_modules/uuid/dist/version.js
new file mode 100644
index 0000000..b72949c
--- /dev/null
+++ b/node_modules/uuid/dist/version.js
@@ -0,0 +1,21 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+
+var _validate = _interopRequireDefault(require("./validate.js"));
+
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+
+function version(uuid) {
+ if (!(0, _validate.default)(uuid)) {
+ throw TypeError('Invalid UUID');
+ }
+
+ return parseInt(uuid.substr(14, 1), 16);
+}
+
+var _default = version;
+exports.default = _default; \ No newline at end of file
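`validate()` and `version()` above work together: `version()` refuses anything that fails the regex check, then reads the hex digit at position 14 of the string. A short sketch:

```js
// Sketch combining validate() and version() from the two modules above.
const { validate, version } = require('uuid');

const id = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; // the DNS namespace UUID

if (validate(id)) {
  console.log(version(id)); // 1 -- the hex digit at position 14 encodes the version
}

console.log(validate('not-a-uuid')); // false
console.log(validate(42));           // false (non-strings are rejected)
```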
diff --git a/node_modules/uuid/package.json b/node_modules/uuid/package.json
new file mode 100644
index 0000000..f0ab371
--- /dev/null
+++ b/node_modules/uuid/package.json
@@ -0,0 +1,135 @@
+{
+ "name": "uuid",
+ "version": "8.3.2",
+ "description": "RFC4122 (v1, v4, and v5) UUIDs",
+ "commitlint": {
+ "extends": [
+ "@commitlint/config-conventional"
+ ]
+ },
+ "keywords": [
+ "uuid",
+ "guid",
+ "rfc4122"
+ ],
+ "license": "MIT",
+ "bin": {
+ "uuid": "./dist/bin/uuid"
+ },
+ "sideEffects": false,
+ "main": "./dist/index.js",
+ "exports": {
+ ".": {
+ "node": {
+ "module": "./dist/esm-node/index.js",
+ "require": "./dist/index.js",
+ "import": "./wrapper.mjs"
+ },
+ "default": "./dist/esm-browser/index.js"
+ },
+ "./package.json": "./package.json"
+ },
+ "module": "./dist/esm-node/index.js",
+ "browser": {
+ "./dist/md5.js": "./dist/md5-browser.js",
+ "./dist/rng.js": "./dist/rng-browser.js",
+ "./dist/sha1.js": "./dist/sha1-browser.js",
+ "./dist/esm-node/index.js": "./dist/esm-browser/index.js"
+ },
+ "files": [
+ "CHANGELOG.md",
+ "CONTRIBUTING.md",
+ "LICENSE.md",
+ "README.md",
+ "dist",
+ "wrapper.mjs"
+ ],
+ "devDependencies": {
+ "@babel/cli": "7.11.6",
+ "@babel/core": "7.11.6",
+ "@babel/preset-env": "7.11.5",
+ "@commitlint/cli": "11.0.0",
+ "@commitlint/config-conventional": "11.0.0",
+ "@rollup/plugin-node-resolve": "9.0.0",
+ "babel-eslint": "10.1.0",
+ "bundlewatch": "0.3.1",
+ "eslint": "7.10.0",
+ "eslint-config-prettier": "6.12.0",
+ "eslint-config-standard": "14.1.1",
+ "eslint-plugin-import": "2.22.1",
+ "eslint-plugin-node": "11.1.0",
+ "eslint-plugin-prettier": "3.1.4",
+ "eslint-plugin-promise": "4.2.1",
+ "eslint-plugin-standard": "4.0.1",
+ "husky": "4.3.0",
+ "jest": "25.5.4",
+ "lint-staged": "10.4.0",
+ "npm-run-all": "4.1.5",
+ "optional-dev-dependency": "2.0.1",
+ "prettier": "2.1.2",
+ "random-seed": "0.3.0",
+ "rollup": "2.28.2",
+ "rollup-plugin-terser": "7.0.2",
+ "runmd": "1.3.2",
+ "standard-version": "9.0.0"
+ },
+ "optionalDevDependencies": {
+ "@wdio/browserstack-service": "6.4.0",
+ "@wdio/cli": "6.4.0",
+ "@wdio/jasmine-framework": "6.4.0",
+ "@wdio/local-runner": "6.4.0",
+ "@wdio/spec-reporter": "6.4.0",
+ "@wdio/static-server-service": "6.4.0",
+ "@wdio/sync": "6.4.0"
+ },
+ "scripts": {
+ "examples:browser:webpack:build": "cd examples/browser-webpack && npm install && npm run build",
+ "examples:browser:rollup:build": "cd examples/browser-rollup && npm install && npm run build",
+ "examples:node:commonjs:test": "cd examples/node-commonjs && npm install && npm test",
+ "examples:node:esmodules:test": "cd examples/node-esmodules && npm install && npm test",
+ "lint": "npm run eslint:check && npm run prettier:check",
+ "eslint:check": "eslint src/ test/ examples/ *.js",
+ "eslint:fix": "eslint --fix src/ test/ examples/ *.js",
+ "pretest": "[ -n $CI ] || npm run build",
+ "test": "BABEL_ENV=commonjs node --throw-deprecation node_modules/.bin/jest test/unit/",
+ "pretest:browser": "optional-dev-dependency && npm run build && npm-run-all --parallel examples:browser:**",
+ "test:browser": "wdio run ./wdio.conf.js",
+ "pretest:node": "npm run build",
+ "test:node": "npm-run-all --parallel examples:node:**",
+ "test:pack": "./scripts/testpack.sh",
+ "pretest:benchmark": "npm run build",
+ "test:benchmark": "cd examples/benchmark && npm install && npm test",
+ "prettier:check": "prettier --ignore-path .prettierignore --check '**/*.{js,jsx,json,md}'",
+ "prettier:fix": "prettier --ignore-path .prettierignore --write '**/*.{js,jsx,json,md}'",
+ "bundlewatch": "npm run pretest:browser && bundlewatch --config bundlewatch.config.json",
+ "md": "runmd --watch --output=README.md README_js.md",
+ "docs": "( node --version | grep -q 'v12' ) && ( npm run build && runmd --output=README.md README_js.md )",
+ "docs:diff": "npm run docs && git diff --quiet README.md",
+ "build": "./scripts/build.sh",
+ "prepack": "npm run build",
+ "release": "standard-version --no-verify"
+ },
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/uuidjs/uuid.git"
+ },
+ "husky": {
+ "hooks": {
+ "commit-msg": "commitlint -E HUSKY_GIT_PARAMS",
+ "pre-commit": "lint-staged"
+ }
+ },
+ "lint-staged": {
+ "*.{js,jsx,json,md}": [
+ "prettier --write"
+ ],
+ "*.{js,jsx}": [
+ "eslint --fix"
+ ]
+ },
+ "standard-version": {
+ "scripts": {
+ "postchangelog": "prettier --write CHANGELOG.md"
+ }
+ }
+}
diff --git a/node_modules/uuid/wrapper.mjs b/node_modules/uuid/wrapper.mjs
new file mode 100644
index 0000000..c31e9ce
--- /dev/null
+++ b/node_modules/uuid/wrapper.mjs
@@ -0,0 +1,10 @@
+import uuid from './dist/index.js';
+export const v1 = uuid.v1;
+export const v3 = uuid.v3;
+export const v4 = uuid.v4;
+export const v5 = uuid.v5;
+export const NIL = uuid.NIL;
+export const version = uuid.version;
+export const validate = uuid.validate;
+export const stringify = uuid.stringify;
+export const parse = uuid.parse;
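`wrapper.mjs` exists so that the conditional `"exports"` map in package.json can give Node ESM consumers named exports while CommonJS consumers keep resolving to `./dist/index.js`. A sketch of the two consumption paths (the resolution behaviour is as described by the exports map above):

```js
// Sketch: ES-module consumption resolves to wrapper.mjs, which re-exports dist/index.js.
// (CommonJS `require('uuid')` resolves to ./dist/index.js directly.)
import { v4 as uuidv4, validate, NIL } from 'uuid';

console.log(uuidv4());      // random UUID string
console.log(NIL);           // '00000000-0000-0000-0000-000000000000'
console.log(validate(NIL)); // true -- the nil UUID is explicitly allowed
```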
diff --git a/node_modules/yaml/LICENSE b/node_modules/yaml/LICENSE
new file mode 100644
index 0000000..e060aaa
--- /dev/null
+++ b/node_modules/yaml/LICENSE
@@ -0,0 +1,13 @@
+Copyright Eemeli Aro <eemeli@gmail.com>
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
+THIS SOFTWARE.
diff --git a/node_modules/yaml/README.md b/node_modules/yaml/README.md
new file mode 100644
index 0000000..0d1ad18
--- /dev/null
+++ b/node_modules/yaml/README.md
@@ -0,0 +1,143 @@
+# YAML <a href="https://www.npmjs.com/package/yaml"><img align="right" src="https://badge.fury.io/js/yaml.svg" title="npm package" /></a>
+
+`yaml` is a definitive library for [YAML](http://yaml.org/), the human friendly data serialization standard.
+This library:
+
+- Supports both YAML 1.1 and YAML 1.2 and all common data schemas,
+- Passes all of the [yaml-test-suite](https://github.com/yaml/yaml-test-suite) tests,
+- Can accept any string as input without throwing, parsing as much YAML out of it as it can, and
+- Supports parsing, modifying, and writing YAML comments and blank lines.
+
+The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/).
+It has no external dependencies and runs on Node.js as well as modern browsers.
+
+For the purposes of versioning, any changes that break any of the documented endpoints or APIs will be considered semver-major breaking changes.
+Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed).
+
+For more information, see the project's documentation site: [**eemeli.org/yaml**](https://eemeli.org/yaml/)
+
+To install:
+
+```sh
+npm install yaml
+```
+
+**Note:** These docs are for `yaml@2`. For v1, see the [v1.10.0 tag](https://github.com/eemeli/yaml/tree/v1.10.0) for the source and [eemeli.org/yaml/v1](https://eemeli.org/yaml/v1/) for the documentation.
+
+## API Overview
+
+The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the underlying [Lexer/Parser/Composer](https://eemeli.org/yaml/#parsing-yaml).
+The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third lets you get progressively closer to YAML source, if that's your thing.
+
+```js
+import { parse, stringify } from 'yaml'
+// or
+import YAML from 'yaml'
+// or
+const YAML = require('yaml')
+```
+
+### Parse & Stringify
+
+- [`parse(str, reviver?, options?): value`](https://eemeli.org/yaml/#yaml-parse)
+- [`stringify(value, replacer?, options?): string`](https://eemeli.org/yaml/#yaml-stringify)
+
+### Documents
+
+- [`Document`](https://eemeli.org/yaml/#documents)
+ - [`constructor(value, replacer?, options?)`](https://eemeli.org/yaml/#creating-documents)
+ - [`#anchors`](https://eemeli.org/yaml/#working-with-anchors)
+ - [`#contents`](https://eemeli.org/yaml/#content-nodes)
+ - [`#directives`](https://eemeli.org/yaml/#stream-directives)
+ - [`#errors`](https://eemeli.org/yaml/#errors)
+ - [`#warnings`](https://eemeli.org/yaml/#errors)
+- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`parseAllDocuments(str, options?): Document[]`](https://eemeli.org/yaml/#parsing-documents)
+- [`parseDocument(str, options?): Document`](https://eemeli.org/yaml/#parsing-documents)
+
+### Content Nodes
+
+- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isCollection(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-nodes)
+- [`new Scalar(value)`](https://eemeli.org/yaml/#scalar-values)
+- [`new YAMLMap()`](https://eemeli.org/yaml/#collections)
+- [`new YAMLSeq()`](https://eemeli.org/yaml/#collections)
+- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#working-with-anchors)
+- [`doc.createNode(value, options?): Node`](https://eemeli.org/yaml/#creating-nodes)
+- [`doc.createPair(key, value): Pair`](https://eemeli.org/yaml/#creating-nodes)
+- [`visit(node, visitor)`](https://eemeli.org/yaml/#modifying-nodes)
+
+### Parsing YAML
+
+- [`new Lexer().lex(src)`](https://eemeli.org/yaml/#lexer)
+- [`new Parser(onNewLine?).parse(src)`](https://eemeli.org/yaml/#parser)
+- [`new Composer(options?).compose(tokens)`](https://eemeli.org/yaml/#composer)
+
+## YAML.parse
+
+```yaml
+# file.yml
+YAML:
+ - A human-readable data serialization language
+ - https://en.wikipedia.org/wiki/YAML
+yaml:
+ - A complete JavaScript implementation
+ - https://www.npmjs.com/package/yaml
+```
+
+```js
+import fs from 'fs'
+import YAML from 'yaml'
+
+YAML.parse('3.14159')
+// 3.14159
+
+YAML.parse('[ true, false, maybe, null ]\n')
+// [ true, false, 'maybe', null ]
+
+const file = fs.readFileSync('./file.yml', 'utf8')
+YAML.parse(file)
+// { YAML:
+// [ 'A human-readable data serialization language',
+// 'https://en.wikipedia.org/wiki/YAML' ],
+// yaml:
+// [ 'A complete JavaScript implementation',
+// 'https://www.npmjs.com/package/yaml' ] }
+```
+
+## YAML.stringify
+
+```js
+import YAML from 'yaml'
+
+YAML.stringify(3.14159)
+// '3.14159\n'
+
+YAML.stringify([true, false, 'maybe', null])
+// `- true
+// - false
+// - maybe
+// - null
+// `
+
+YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' })
+// `number: 3
+// plain: string
+// block: |
+// two
+// lines
+// `
+```
+
+---
+
+Browser testing provided by:
+
+<a href="https://www.browserstack.com/open-source">
+<img width=200 src="https://eemeli.org/yaml/images/browserstack.svg" />
+</a>
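Beyond the `parse`/`stringify` layer shown in the README above, the Documents layer exposes the parsed tree for inspection and editing. A best-effort sketch using the `parseDocument` entry point listed above (the `set`/`toString` helpers are not shown in this diff and are assumed from the library's documented Document API):

```js
// Sketch of the Documents layer, assuming yaml@2.
import { parseDocument } from 'yaml'

const doc = parseDocument('number: 3\nplain: string\n')
console.log(doc.errors)            // [] when the input parsed cleanly
console.log(doc.contents.items[0]) // a Pair node for `number: 3`

doc.set('number', 4)               // edit through the document API
console.log(String(doc))           // 'number: 4\nplain: string\n'
```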
diff --git a/node_modules/yaml/browser/dist/compose/compose-collection.js b/node_modules/yaml/browser/dist/compose/compose-collection.js
new file mode 100644
index 0000000..6cf509c
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/compose-collection.js
@@ -0,0 +1,59 @@
+import { isNode, isMap } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import { resolveBlockMap } from './resolve-block-map.js';
+import { resolveBlockSeq } from './resolve-block-seq.js';
+import { resolveFlowCollection } from './resolve-flow-collection.js';
+
+function composeCollection(CN, ctx, token, tagToken, onError) {
+ let coll;
+ switch (token.type) {
+ case 'block-map': {
+ coll = resolveBlockMap(CN, ctx, token, onError);
+ break;
+ }
+ case 'block-seq': {
+ coll = resolveBlockSeq(CN, ctx, token, onError);
+ break;
+ }
+ case 'flow-collection': {
+ coll = resolveFlowCollection(CN, ctx, token, onError);
+ break;
+ }
+ }
+ if (!tagToken)
+ return coll;
+ const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
+ if (!tagName)
+ return coll;
+ // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
+ const Coll = coll.constructor;
+ if (tagName === '!' || tagName === Coll.tagName) {
+ coll.tag = Coll.tagName;
+ return coll;
+ }
+ const expType = isMap(coll) ? 'map' : 'seq';
+ let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
+ if (!tag) {
+ const kt = ctx.schema.knownTags[tagName];
+ if (kt && kt.collection === expType) {
+ ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
+ tag = kt;
+ }
+ else {
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
+ coll.tag = tagName;
+ return coll;
+ }
+ }
+ const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ const node = isNode(res)
+ ? res
+ : new Scalar(res);
+ node.range = coll.range;
+ node.tag = tagName;
+ if (tag === null || tag === void 0 ? void 0 : tag.format)
+ node.format = tag.format;
+ return node;
+}
+
+export { composeCollection };
diff --git a/node_modules/yaml/browser/dist/compose/compose-doc.js b/node_modules/yaml/browser/dist/compose/compose-doc.js
new file mode 100644
index 0000000..54451c9
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/compose-doc.js
@@ -0,0 +1,40 @@
+import { Document } from '../doc/Document.js';
+import { composeNode, composeEmptyNode } from './compose-node.js';
+import { resolveEnd } from './resolve-end.js';
+import { resolveProps } from './resolve-props.js';
+
+function composeDoc(options, directives, { offset, start, value, end }, onError) {
+ const opts = Object.assign({ directives }, options);
+ const doc = new Document(undefined, opts);
+ const ctx = {
+ atRoot: true,
+ directives: doc.directives,
+ options: doc.options,
+ schema: doc.schema
+ };
+ const props = resolveProps(start, {
+ indicator: 'doc-start',
+ next: value !== null && value !== void 0 ? value : end === null || end === void 0 ? void 0 : end[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ if (props.found) {
+ doc.directives.docStart = true;
+ if (value &&
+ (value.type === 'block-map' || value.type === 'block-seq') &&
+ !props.hasNewline)
+ onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
+ }
+ doc.contents = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, props.end, start, null, props, onError);
+ const contentEnd = doc.contents.range[2];
+ const re = resolveEnd(end, contentEnd, false, onError);
+ if (re.comment)
+ doc.comment = re.comment;
+ doc.range = [offset, contentEnd, re.offset];
+ return doc;
+}
+
+export { composeDoc };
diff --git a/node_modules/yaml/browser/dist/compose/compose-node.js b/node_modules/yaml/browser/dist/compose/compose-node.js
new file mode 100644
index 0000000..fea76ce
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/compose-node.js
@@ -0,0 +1,90 @@
+import { Alias } from '../nodes/Alias.js';
+import { composeCollection } from './compose-collection.js';
+import { composeScalar } from './compose-scalar.js';
+import { resolveEnd } from './resolve-end.js';
+import { emptyScalarPosition } from './util-empty-scalar-position.js';
+
+const CN = { composeNode, composeEmptyNode };
+function composeNode(ctx, token, props, onError) {
+ const { spaceBefore, comment, anchor, tag } = props;
+ let node;
+ let isSrcToken = true;
+ switch (token.type) {
+ case 'alias':
+ node = composeAlias(ctx, token, onError);
+ if (anchor || tag)
+ onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
+ break;
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'block-scalar':
+ node = composeScalar(ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ case 'block-map':
+ case 'block-seq':
+ case 'flow-collection':
+ node = composeCollection(CN, ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ default: {
+ const message = token.type === 'error'
+ ? token.message
+ : `Unsupported token (type: ${token.type})`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
+ isSrcToken = false;
+ }
+ }
+ if (anchor && node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment) {
+ if (token.type === 'scalar' && token.source === '')
+ node.comment = comment;
+ else
+ node.commentBefore = comment;
+ }
+ // @ts-expect-error Type checking misses meaning of isSrcToken
+ if (ctx.options.keepSourceTokens && isSrcToken)
+ node.srcToken = token;
+ return node;
+}
+function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {
+ const token = {
+ type: 'scalar',
+ offset: emptyScalarPosition(offset, before, pos),
+ indent: -1,
+ source: ''
+ };
+ const node = composeScalar(ctx, token, tag, onError);
+ if (anchor) {
+ node.anchor = anchor.source.substring(1);
+ if (node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ }
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment)
+ node.comment = comment;
+ return node;
+}
+function composeAlias({ options }, { offset, source, end }, onError) {
+ const alias = new Alias(source.substring(1));
+ if (alias.source === '')
+ onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
+ if (alias.source.endsWith(':'))
+ onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
+ const valueEnd = offset + source.length;
+ const re = resolveEnd(end, valueEnd, options.strict, onError);
+ alias.range = [offset, valueEnd, re.offset];
+ if (re.comment)
+ alias.comment = re.comment;
+ return alias;
+}
+
+export { composeEmptyNode, composeNode };
diff --git a/node_modules/yaml/browser/dist/compose/compose-scalar.js b/node_modules/yaml/browser/dist/compose/compose-scalar.js
new file mode 100644
index 0000000..8d50c56
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/compose-scalar.js
@@ -0,0 +1,81 @@
+import { SCALAR, isScalar } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import { resolveBlockScalar } from './resolve-block-scalar.js';
+import { resolveFlowScalar } from './resolve-flow-scalar.js';
+
+function composeScalar(ctx, token, tagToken, onError) {
+ const { value, type, comment, range } = token.type === 'block-scalar'
+ ? resolveBlockScalar(token, ctx.options.strict, onError)
+ : resolveFlowScalar(token, ctx.options.strict, onError);
+ const tagName = tagToken
+ ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
+ : null;
+ const tag = tagToken && tagName
+ ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
+ : token.type === 'scalar'
+ ? findScalarTagByTest(ctx, value, token, onError)
+ : ctx.schema[SCALAR];
+ let scalar;
+ try {
+ const res = tag.resolve(value, msg => onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ scalar = isScalar(res) ? res : new Scalar(res);
+ }
+ catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg);
+ scalar = new Scalar(value);
+ }
+ scalar.range = range;
+ scalar.source = value;
+ if (type)
+ scalar.type = type;
+ if (tagName)
+ scalar.tag = tagName;
+ if (tag.format)
+ scalar.format = tag.format;
+ if (comment)
+ scalar.comment = comment;
+ return scalar;
+}
+function findScalarTagByName(schema, value, tagName, tagToken, onError) {
+ var _a;
+ if (tagName === '!')
+ return schema[SCALAR]; // non-specific tag
+ const matchWithTest = [];
+ for (const tag of schema.tags) {
+ if (!tag.collection && tag.tag === tagName) {
+ if (tag.default && tag.test)
+ matchWithTest.push(tag);
+ else
+ return tag;
+ }
+ }
+ for (const tag of matchWithTest)
+ if ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value))
+ return tag;
+ const kt = schema.knownTags[tagName];
+ if (kt && !kt.collection) {
+ // Ensure that the known tag is available for stringifying,
+ // but does not get used by default.
+ schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
+ return kt;
+ }
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
+ return schema[SCALAR];
+}
+function findScalarTagByTest({ directives, schema }, value, token, onError) {
+ var _a;
+ const tag = schema.tags.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); }) || schema[SCALAR];
+ if (schema.compat) {
+ const compat = (_a = schema.compat.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); })) !== null && _a !== void 0 ? _a : schema[SCALAR];
+ if (tag.tag !== compat.tag) {
+ const ts = directives.tagString(tag.tag);
+ const cs = directives.tagString(compat.tag);
+ const msg = `Value may be parsed as either ${ts} or ${cs}`;
+ onError(token, 'TAG_RESOLVE_FAILED', msg, true);
+ }
+ }
+ return tag;
+}
+
+export { composeScalar };
diff --git a/node_modules/yaml/browser/dist/compose/composer.js b/node_modules/yaml/browser/dist/compose/composer.js
new file mode 100644
index 0000000..431650b
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/composer.js
@@ -0,0 +1,218 @@
+import { Directives } from '../doc/directives.js';
+import { Document } from '../doc/Document.js';
+import { YAMLWarning, YAMLParseError } from '../errors.js';
+import { isCollection, isPair } from '../nodes/Node.js';
+import { composeDoc } from './compose-doc.js';
+import { resolveEnd } from './resolve-end.js';
+
+function getErrorPos(src) {
+ if (typeof src === 'number')
+ return [src, src + 1];
+ if (Array.isArray(src))
+ return src.length === 2 ? src : [src[0], src[1]];
+ const { offset, source } = src;
+ return [offset, offset + (typeof source === 'string' ? source.length : 1)];
+}
+function parsePrelude(prelude) {
+ var _a;
+ let comment = '';
+ let atComment = false;
+ let afterEmptyLine = false;
+ for (let i = 0; i < prelude.length; ++i) {
+ const source = prelude[i];
+ switch (source[0]) {
+ case '#':
+ comment +=
+ (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
+ (source.substring(1) || ' ');
+ atComment = true;
+ afterEmptyLine = false;
+ break;
+ case '%':
+ if (((_a = prelude[i + 1]) === null || _a === void 0 ? void 0 : _a[0]) !== '#')
+ i += 1;
+ atComment = false;
+ break;
+ default:
+ // This may be wrong after doc-end, but in that case it doesn't matter
+ if (!atComment)
+ afterEmptyLine = true;
+ atComment = false;
+ }
+ }
+ return { comment, afterEmptyLine };
+}
+/**
+ * Compose a stream of CST nodes into a stream of YAML Documents.
+ *
+ * ```ts
+ * import { Composer, Parser } from 'yaml'
+ *
+ * const src: string = ...
+ * const tokens = new Parser().parse(src)
+ * const docs = new Composer().compose(tokens)
+ * ```
+ */
+class Composer {
+ constructor(options = {}) {
+ this.doc = null;
+ this.atDirectives = false;
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ this.onError = (source, code, message, warning) => {
+ const pos = getErrorPos(source);
+ if (warning)
+ this.warnings.push(new YAMLWarning(pos, code, message));
+ else
+ this.errors.push(new YAMLParseError(pos, code, message));
+ };
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ this.directives = new Directives({ version: options.version || '1.2' });
+ this.options = options;
+ }
+ decorate(doc, afterDoc) {
+ const { comment, afterEmptyLine } = parsePrelude(this.prelude);
+ //console.log({ dc: doc.comment, prelude, comment })
+ if (comment) {
+ const dc = doc.contents;
+ if (afterDoc) {
+ doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
+ }
+ else if (afterEmptyLine || doc.directives.docStart || !dc) {
+ doc.commentBefore = comment;
+ }
+ else if (isCollection(dc) && !dc.flow && dc.items.length > 0) {
+ let it = dc.items[0];
+ if (isPair(it))
+ it = it.key;
+ const cb = it.commentBefore;
+ it.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ else {
+ const cb = dc.commentBefore;
+ dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ }
+ if (afterDoc) {
+ Array.prototype.push.apply(doc.errors, this.errors);
+ Array.prototype.push.apply(doc.warnings, this.warnings);
+ }
+ else {
+ doc.errors = this.errors;
+ doc.warnings = this.warnings;
+ }
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ }
+ /**
+ * Current stream status information.
+ *
+ * Mostly useful at the end of input for an empty stream.
+ */
+ streamInfo() {
+ return {
+ comment: parsePrelude(this.prelude).comment,
+ directives: this.directives,
+ errors: this.errors,
+ warnings: this.warnings
+ };
+ }
+ /**
+ * Compose tokens into documents.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *compose(tokens, forceDoc = false, endOffset = -1) {
+ for (const token of tokens)
+ yield* this.next(token);
+ yield* this.end(forceDoc, endOffset);
+ }
+ /** Advance the composer by one CST token. */
+ *next(token) {
+ switch (token.type) {
+ case 'directive':
+ this.directives.add(token.source, (offset, message, warning) => {
+ const pos = getErrorPos(token);
+ pos[0] += offset;
+ this.onError(pos, 'BAD_DIRECTIVE', message, warning);
+ });
+ this.prelude.push(token.source);
+ this.atDirectives = true;
+ break;
+ case 'document': {
+ const doc = composeDoc(this.options, this.directives, token, this.onError);
+ if (this.atDirectives && !doc.directives.docStart)
+ this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
+ this.decorate(doc, false);
+ if (this.doc)
+ yield this.doc;
+ this.doc = doc;
+ this.atDirectives = false;
+ break;
+ }
+ case 'byte-order-mark':
+ case 'space':
+ break;
+ case 'comment':
+ case 'newline':
+ this.prelude.push(token.source);
+ break;
+ case 'error': {
+ const msg = token.source
+ ? `${token.message}: ${JSON.stringify(token.source)}`
+ : token.message;
+ const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
+ if (this.atDirectives || !this.doc)
+ this.errors.push(error);
+ else
+ this.doc.errors.push(error);
+ break;
+ }
+ case 'doc-end': {
+ if (!this.doc) {
+ const msg = 'Unexpected doc-end without preceding document';
+ this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
+ break;
+ }
+ this.doc.directives.docEnd = true;
+ const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
+ this.decorate(this.doc, true);
+ if (end.comment) {
+ const dc = this.doc.comment;
+ this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
+ }
+ this.doc.range[2] = end.offset;
+ break;
+ }
+ default:
+ this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
+ }
+ }
+ /**
+ * Call at end of input to yield any remaining document.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *end(forceDoc = false, endOffset = -1) {
+ if (this.doc) {
+ this.decorate(this.doc, true);
+ yield this.doc;
+ this.doc = null;
+ }
+ else if (forceDoc) {
+ const opts = Object.assign({ directives: this.directives }, this.options);
+ const doc = new Document(undefined, opts);
+ if (this.atDirectives)
+ this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
+ doc.range = [0, endOffset, endOffset];
+ this.decorate(doc, false);
+ yield doc;
+ }
+ }
+}
+
+export { Composer };
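The JSDoc on `compose()` above shows the lowest API layer: a `Parser` turns source text into CST tokens and the `Composer` turns those tokens into `Document`s. A best-effort sketch under that usage (both classes are re-exported from the public `yaml` entry point per the README's "Parsing YAML" list; `toJS()` is assumed from the documented Document API):

```js
// Sketch: composing a token stream into Documents, per the compose() JSDoc above.
import { Composer, Parser } from 'yaml'

const src = 'a: 1\n---\nb: 2\n'
const tokens = new Parser().parse(src)                   // generator of CST tokens
const docs = Array.from(new Composer().compose(tokens))  // one Document per YAML document

console.log(docs.length)             // 2
console.log(docs.map(d => d.toJS())) // [ { a: 1 }, { b: 2 } ]
```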
diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-map.js b/node_modules/yaml/browser/dist/compose/resolve-block-map.js
new file mode 100644
index 0000000..c61ba3c
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-block-map.js
@@ -0,0 +1,107 @@
+import { Pair } from '../nodes/Pair.js';
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import { resolveProps } from './resolve-props.js';
+import { containsNewline } from './util-contains-newline.js';
+import { flowIndentCheck } from './util-flow-indent-check.js';
+import { mapIncludes } from './util-map-includes.js';
+
+const startColMsg = 'All mapping items must start at the same column';
+function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
+ var _a;
+ const map = new YAMLMap(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bm.offset;
+ for (const collItem of bm.items) {
+ const { start, key, sep, value } = collItem;
+ // key properties
+ const keyProps = resolveProps(start, {
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ const implicitKey = !keyProps.found;
+ if (implicitKey) {
+ if (key) {
+ if (key.type === 'block-seq')
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
+ else if ('indent' in key && key.indent !== bm.indent)
+ onError(offset, 'BAD_INDENT', startColMsg);
+ }
+ if (!keyProps.anchor && !keyProps.tag && !sep) {
+ // TODO: assert being at last item?
+ if (keyProps.comment) {
+ if (map.comment)
+ map.comment += '\n' + keyProps.comment;
+ else
+ map.comment = keyProps.comment;
+ }
+ continue;
+ }
+ }
+ else if (((_a = keyProps.found) === null || _a === void 0 ? void 0 : _a.indent) !== bm.indent)
+ onError(offset, 'BAD_INDENT', startColMsg);
+ if (implicitKey && containsNewline(key))
+ onError(key, // checked by containsNewline()
+ 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
+ // key value
+ const keyStart = keyProps.end;
+ const keyNode = key
+ ? composeNode(ctx, key, keyProps, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
+ if (ctx.schema.compat)
+ flowIndentCheck(bm.indent, key, onError);
+ if (mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ // value properties
+ const valueProps = resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: !key || key.type === 'block-scalar'
+ });
+ offset = valueProps.end;
+ if (valueProps.found) {
+ if (implicitKey) {
+ if ((value === null || value === void 0 ? void 0 : value.type) === 'block-map' && !valueProps.hasNewline)
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
+ if (ctx.options.strict &&
+ keyProps.start < valueProps.found.offset - 1024)
+ onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
+ }
+ // value value
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
+ if (ctx.schema.compat)
+ flowIndentCheck(bm.indent, value, onError);
+ offset = valueNode.range[2];
+ const pair = new Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ else {
+ // key with no value
+ if (implicitKey)
+ onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
+ if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair(keyNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ }
+ map.range = [bm.offset, offset, offset];
+ return map;
+}
+
+export { resolveBlockMap };
diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js b/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js
new file mode 100644
index 0000000..ee65612
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js
@@ -0,0 +1,194 @@
+import { Scalar } from '../nodes/Scalar.js';
+
+function resolveBlockScalar(scalar, strict, onError) {
+ const start = scalar.offset;
+ const header = parseBlockScalarHeader(scalar, strict, onError);
+ if (!header)
+ return { value: '', type: null, comment: '', range: [start, start, start] };
+ const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
+ const lines = scalar.source ? splitLines(scalar.source) : [];
+ // determine the end of content & start of chomping
+ let chompStart = lines.length;
+ for (let i = lines.length - 1; i >= 0; --i) {
+ const content = lines[i][1];
+ if (content === '' || content === '\r')
+ chompStart = i;
+ else
+ break;
+ }
+ // shortcut for empty contents
+ if (chompStart === 0) {
+ const value = header.chomp === '+' && lines.length > 0
+ ? '\n'.repeat(Math.max(1, lines.length - 1))
+ : '';
+ let end = start + header.length;
+ if (scalar.source)
+ end += scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+ }
+ // find the indentation level to trim from start
+ let trimIndent = scalar.indent + header.indent;
+ let offset = scalar.offset + header.length;
+ let contentStart = 0;
+ for (let i = 0; i < chompStart; ++i) {
+ const [indent, content] = lines[i];
+ if (content === '' || content === '\r') {
+ if (header.indent === 0 && indent.length > trimIndent)
+ trimIndent = indent.length;
+ }
+ else {
+ if (indent.length < trimIndent) {
+ const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
+ onError(offset + indent.length, 'MISSING_CHAR', message);
+ }
+ if (header.indent === 0)
+ trimIndent = indent.length;
+ contentStart = i;
+ break;
+ }
+ offset += indent.length + content.length + 1;
+ }
+ // include trailing more-indented empty lines in content
+ for (let i = lines.length - 1; i >= chompStart; --i) {
+ if (lines[i][0].length > trimIndent)
+ chompStart = i + 1;
+ }
+ let value = '';
+ let sep = '';
+ let prevMoreIndented = false;
+ // leading whitespace is kept intact
+ for (let i = 0; i < contentStart; ++i)
+ value += lines[i][0].slice(trimIndent) + '\n';
+ for (let i = contentStart; i < chompStart; ++i) {
+ let [indent, content] = lines[i];
+ offset += indent.length + content.length + 1;
+ const crlf = content[content.length - 1] === '\r';
+ if (crlf)
+ content = content.slice(0, -1);
+ /* istanbul ignore if already caught in lexer */
+ if (content && indent.length < trimIndent) {
+ const src = header.indent
+ ? 'explicit indentation indicator'
+ : 'first line';
+ const message = `Block scalar lines must not be less indented than their ${src}`;
+ onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
+ indent = '';
+ }
+ if (type === Scalar.BLOCK_LITERAL) {
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ }
+ else if (indent.length > trimIndent || content[0] === '\t') {
+ // more-indented content within a folded block
+ if (sep === ' ')
+ sep = '\n';
+ else if (!prevMoreIndented && sep === '\n')
+ sep = '\n\n';
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ prevMoreIndented = true;
+ }
+ else if (content === '') {
+ // empty line
+ if (sep === '\n')
+ value += '\n';
+ else
+ sep = '\n';
+ }
+ else {
+ value += sep + content;
+ sep = ' ';
+ prevMoreIndented = false;
+ }
+ }
+ switch (header.chomp) {
+ case '-':
+ break;
+ case '+':
+ for (let i = chompStart; i < lines.length; ++i)
+ value += '\n' + lines[i][0].slice(trimIndent);
+ if (value[value.length - 1] !== '\n')
+ value += '\n';
+ break;
+ default:
+ value += '\n';
+ }
+ const end = start + header.length + scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+}
+function parseBlockScalarHeader({ offset, props }, strict, onError) {
+ /* istanbul ignore if should not happen */
+ if (props[0].type !== 'block-scalar-header') {
+ onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
+ return null;
+ }
+ const { source } = props[0];
+ const mode = source[0];
+ let indent = 0;
+ let chomp = '';
+ let error = -1;
+ for (let i = 1; i < source.length; ++i) {
+ const ch = source[i];
+ if (!chomp && (ch === '-' || ch === '+'))
+ chomp = ch;
+ else {
+ const n = Number(ch);
+ if (!indent && n)
+ indent = n;
+ else if (error === -1)
+ error = offset + i;
+ }
+ }
+ if (error !== -1)
+ onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
+ let hasSpace = false;
+ let comment = '';
+ let length = source.length;
+ for (let i = 1; i < props.length; ++i) {
+ const token = props[i];
+ switch (token.type) {
+ case 'space':
+ hasSpace = true;
+ // fallthrough
+ case 'newline':
+ length += token.source.length;
+ break;
+ case 'comment':
+ if (strict && !hasSpace) {
+ const message = 'Comments must be separated from other tokens by white space characters';
+ onError(token, 'MISSING_CHAR', message);
+ }
+ length += token.source.length;
+ comment = token.source.substring(1);
+ break;
+ case 'error':
+ onError(token, 'UNEXPECTED_TOKEN', token.message);
+ length += token.source.length;
+ break;
+ /* istanbul ignore next should not happen */
+ default: {
+ const message = `Unexpected token in block scalar header: ${token.type}`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ const ts = token.source;
+ if (ts && typeof ts === 'string')
+ length += ts.length;
+ }
+ }
+ }
+ return { mode, indent, chomp, comment, length };
+}
+/** @returns Array of lines split up as `[indent, content]` */
+function splitLines(source) {
+ const split = source.split(/\n( *)/);
+ const first = split[0];
+ const m = first.match(/^( *)/);
+ const line0 = (m === null || m === void 0 ? void 0 : m[1])
+ ? [m[1], first.slice(m[1].length)]
+ : ['', first];
+ const lines = [line0];
+ for (let i = 1; i < split.length; i += 2)
+ lines.push([split[i], split[i + 1]]);
+ return lines;
+}
+
+export { resolveBlockScalar };
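
A minimal sketch of the chomping and folding behaviour resolved above, exercised through the package's public parse() export (expected values shown as comments):

    import { parse } from 'yaml';

    // Folded scalar (>): lines are joined with spaces; default (clip) chomping keeps one trailing newline.
    parse('>\n folded\n text\n');     // 'folded text\n'

    // Literal scalar with strip chomping (|-): trailing newlines are removed.
    parse('|-\n line 1\n line 2\n');  // 'line 1\nline 2'

    // Keep chomping (|+): trailing empty lines are preserved.
    parse('|+\n kept\n\n');           // 'kept\n\n'
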
diff --git a/node_modules/yaml/browser/dist/compose/resolve-block-seq.js b/node_modules/yaml/browser/dist/compose/resolve-block-seq.js
new file mode 100644
index 0000000..d2c2d9e
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-block-seq.js
@@ -0,0 +1,45 @@
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import { resolveProps } from './resolve-props.js';
+import { flowIndentCheck } from './util-flow-indent-check.js';
+
+function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
+ const seq = new YAMLSeq(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bs.offset;
+ for (const { start, value } of bs.items) {
+ const props = resolveProps(start, {
+ indicator: 'seq-item-ind',
+ next: value,
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ offset = props.end;
+ if (!props.found) {
+ if (props.anchor || props.tag || value) {
+ if (value && value.type === 'block-seq')
+ onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');
+ else
+ onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
+ }
+ else {
+ // TODO: assert being at last item?
+ if (props.comment)
+ seq.comment = props.comment;
+ continue;
+ }
+ }
+ const node = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, offset, start, null, props, onError);
+ if (ctx.schema.compat)
+ flowIndentCheck(bs.indent, value, onError);
+ offset = node.range[2];
+ seq.items.push(node);
+ }
+ seq.range = [bs.offset, offset, offset];
+ return seq;
+}
+
+export { resolveBlockSeq };
diff --git a/node_modules/yaml/browser/dist/compose/resolve-end.js b/node_modules/yaml/browser/dist/compose/resolve-end.js
new file mode 100644
index 0000000..d5c65d7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-end.js
@@ -0,0 +1,37 @@
+function resolveEnd(end, offset, reqSpace, onError) {
+ let comment = '';
+ if (end) {
+ let hasSpace = false;
+ let sep = '';
+ for (const token of end) {
+ const { source, type } = token;
+ switch (type) {
+ case 'space':
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (reqSpace && !hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += sep + cb;
+ sep = '';
+ break;
+ }
+ case 'newline':
+ if (comment)
+ sep += source;
+ hasSpace = true;
+ break;
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
+ }
+ offset += source.length;
+ }
+ }
+ return { comment, offset };
+}
+
+export { resolveEnd };
diff --git a/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js b/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js
new file mode 100644
index 0000000..001d0d1
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js
@@ -0,0 +1,201 @@
+import { isPair } from '../nodes/Node.js';
+import { Pair } from '../nodes/Pair.js';
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import { resolveEnd } from './resolve-end.js';
+import { resolveProps } from './resolve-props.js';
+import { containsNewline } from './util-contains-newline.js';
+import { mapIncludes } from './util-map-includes.js';
+
+const blockMsg = 'Block collections are not allowed within flow collections';
+const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
+function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
+ var _a;
+ const isMap = fc.start.source === '{';
+ const fcName = isMap ? 'flow map' : 'flow sequence';
+ const coll = isMap
+ ? new YAMLMap(ctx.schema)
+ : new YAMLSeq(ctx.schema);
+ coll.flow = true;
+ const atRoot = ctx.atRoot;
+ if (atRoot)
+ ctx.atRoot = false;
+ let offset = fc.offset + fc.start.source.length;
+ for (let i = 0; i < fc.items.length; ++i) {
+ const collItem = fc.items[i];
+ const { start, key, sep, value } = collItem;
+ const props = resolveProps(start, {
+ flow: fcName,
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: false
+ });
+ if (!props.found) {
+ if (!props.anchor && !props.tag && !sep && !value) {
+ if (i === 0 && props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ else if (i < fc.items.length - 1)
+ onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
+ if (props.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + props.comment;
+ else
+ coll.comment = props.comment;
+ }
+ offset = props.end;
+ continue;
+ }
+ if (!isMap && ctx.options.strict && containsNewline(key))
+ onError(key, // checked by containsNewline()
+ 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ }
+ if (i === 0) {
+ if (props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ }
+ else {
+ if (!props.comma)
+ onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
+ if (props.comment) {
+ let prevItemComment = '';
+ loop: for (const st of start) {
+ switch (st.type) {
+ case 'comma':
+ case 'space':
+ break;
+ case 'comment':
+ prevItemComment = st.source.substring(1);
+ break loop;
+ default:
+ break loop;
+ }
+ }
+ if (prevItemComment) {
+ let prev = coll.items[coll.items.length - 1];
+ if (isPair(prev))
+ prev = (_a = prev.value) !== null && _a !== void 0 ? _a : prev.key;
+ if (prev.comment)
+ prev.comment += '\n' + prevItemComment;
+ else
+ prev.comment = prevItemComment;
+ props.comment = props.comment.substring(prevItemComment.length + 1);
+ }
+ }
+ }
+ if (!isMap && !sep && !props.found) {
+ // item is a value in a seq
+ // → key & sep are empty, start does not include ? or :
+ const valueNode = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, props.end, sep, null, props, onError);
+ coll.items.push(valueNode);
+ offset = valueNode.range[2];
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else {
+ // item is a key+value pair
+ // key node
+ const keyStart = props.end;
+ const keyNode = key
+ ? composeNode(ctx, key, props, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, props, onError);
+ if (isBlock(key))
+ onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ // value properties
+ const valueProps = resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ flow: fcName,
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: false
+ });
+ if (valueProps.found) {
+ if (!isMap && !props.found && ctx.options.strict) {
+ if (sep)
+ for (const st of sep) {
+ if (st === valueProps.found)
+ break;
+ if (st.type === 'newline') {
+ onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ break;
+ }
+ }
+ if (props.start < valueProps.found.offset - 1024)
+ onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
+ }
+ }
+ else if (value) {
+ if ('source' in value && value.source && value.source[0] === ':')
+ onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
+ else
+ onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
+ }
+ // value node
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : valueProps.found
+ ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
+ : null;
+ if (valueNode) {
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ if (isMap) {
+ const map = coll;
+ if (mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ map.items.push(pair);
+ }
+ else {
+ const map = new YAMLMap(ctx.schema);
+ map.flow = true;
+ map.items.push(pair);
+ coll.items.push(map);
+ }
+ offset = valueNode ? valueNode.range[2] : valueProps.end;
+ }
+ }
+ const expectedEnd = isMap ? '}' : ']';
+ const [ce, ...ee] = fc.end;
+ let cePos = offset;
+ if (ce && ce.source === expectedEnd)
+ cePos = ce.offset + ce.source.length;
+ else {
+ const name = fcName[0].toUpperCase() + fcName.substring(1);
+ const msg = atRoot
+ ? `${name} must end with a ${expectedEnd}`
+ : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
+ onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
+ if (ce && ce.source.length !== 1)
+ ee.unshift(ce);
+ }
+ if (ee.length > 0) {
+ const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
+ if (end.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + end.comment;
+ else
+ coll.comment = end.comment;
+ }
+ coll.range = [fc.offset, cePos, end.offset];
+ }
+ else {
+ coll.range = [fc.offset, cePos, cePos];
+ }
+ return coll;
+}
+
+export { resolveFlowCollection };
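
As a usage sketch, the flow-collection handling above surfaces through the public parse() and parseDocument() exports; the error code below is the one emitted at the end of resolveFlowCollection() when the closing bracket is missing at the document root:

    import { parse, parseDocument } from 'yaml';

    // Flow collections resolve to plain objects and arrays.
    parse('{ a: 1, b: [2, 3] }');  // { a: 1, b: [ 2, 3 ] }

    // An unterminated flow map at the root reports a MISSING_CHAR error.
    parseDocument('{ a: 1').errors.some(e => e.code === 'MISSING_CHAR');  // true
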
diff --git a/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js b/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js
new file mode 100644
index 0000000..b0558d7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js
@@ -0,0 +1,224 @@
+import { Scalar } from '../nodes/Scalar.js';
+import { resolveEnd } from './resolve-end.js';
+
+function resolveFlowScalar(scalar, strict, onError) {
+ const { offset, type, source, end } = scalar;
+ let _type;
+ let value;
+ const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
+ switch (type) {
+ case 'scalar':
+ _type = Scalar.PLAIN;
+ value = plainValue(source, _onError);
+ break;
+ case 'single-quoted-scalar':
+ _type = Scalar.QUOTE_SINGLE;
+ value = singleQuotedValue(source, _onError);
+ break;
+ case 'double-quoted-scalar':
+ _type = Scalar.QUOTE_DOUBLE;
+ value = doubleQuotedValue(source, _onError);
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
+ return {
+ value: '',
+ type: null,
+ comment: '',
+ range: [offset, offset + source.length, offset + source.length]
+ };
+ }
+ const valueEnd = offset + source.length;
+ const re = resolveEnd(end, valueEnd, strict, onError);
+ return {
+ value,
+ type: _type,
+ comment: re.comment,
+ range: [offset, valueEnd, re.offset]
+ };
+}
+function plainValue(source, onError) {
+ let badChar = '';
+ switch (source[0]) {
+ /* istanbul ignore next should not happen */
+ case '\t':
+ badChar = 'a tab character';
+ break;
+ case ',':
+ badChar = 'flow indicator character ,';
+ break;
+ case '%':
+ badChar = 'directive indicator character %';
+ break;
+ case '|':
+ case '>': {
+ badChar = `block scalar indicator ${source[0]}`;
+ break;
+ }
+ case '@':
+ case '`': {
+ badChar = `reserved character ${source[0]}`;
+ break;
+ }
+ }
+ if (badChar)
+ onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
+ return foldLines(source);
+}
+function singleQuotedValue(source, onError) {
+ if (source[source.length - 1] !== "'" || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
+ return foldLines(source.slice(1, -1)).replace(/''/g, "'");
+}
+function foldLines(source) {
+ var _a;
+ /**
+ * The negative lookbehind here and in the `re` RegExp is to
+ * prevent causing a polynomial search time in certain cases.
+ *
+ * The try-catch is for Safari, which doesn't support this yet:
+ * https://caniuse.com/js-regexp-lookbehind
+ */
+ let first, line;
+ try {
+ first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
+ line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
+ }
+ catch (_) {
+ first = /(.*?)[ \t]*\r?\n/sy;
+ line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
+ }
+ let match = first.exec(source);
+ if (!match)
+ return source;
+ let res = match[1];
+ let sep = ' ';
+ let pos = first.lastIndex;
+ line.lastIndex = pos;
+ while ((match = line.exec(source))) {
+ if (match[1] === '') {
+ if (sep === '\n')
+ res += sep;
+ else
+ sep = '\n';
+ }
+ else {
+ res += sep + match[1];
+ sep = ' ';
+ }
+ pos = line.lastIndex;
+ }
+ const last = /[ \t]*(.*)/sy;
+ last.lastIndex = pos;
+ match = last.exec(source);
+ return res + sep + ((_a = match === null || match === void 0 ? void 0 : match[1]) !== null && _a !== void 0 ? _a : '');
+}
+function doubleQuotedValue(source, onError) {
+ let res = '';
+ for (let i = 1; i < source.length - 1; ++i) {
+ const ch = source[i];
+ if (ch === '\r' && source[i + 1] === '\n')
+ continue;
+ if (ch === '\n') {
+ const { fold, offset } = foldNewline(source, i);
+ res += fold;
+ i = offset;
+ }
+ else if (ch === '\\') {
+ let next = source[++i];
+ const cc = escapeCodes[next];
+ if (cc)
+ res += cc;
+ else if (next === '\n') {
+ // skip escaped newlines, but still trim the following line
+ next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === '\r' && source[i + 1] === '\n') {
+ // skip escaped CRLF newlines, but still trim the following line
+ next = source[++i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === 'x' || next === 'u' || next === 'U') {
+ const length = { x: 2, u: 4, U: 8 }[next];
+ res += parseCharCode(source, i + 1, length, onError);
+ i += length;
+ }
+ else {
+ const raw = source.substr(i - 1, 2);
+ onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ res += raw;
+ }
+ }
+ else if (ch === ' ' || ch === '\t') {
+ // trim trailing whitespace
+ const wsStart = i;
+ let next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
+ res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
+ }
+ else {
+ res += ch;
+ }
+ }
+ if (source[source.length - 1] !== '"' || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
+ return res;
+}
+/**
+ * Fold a single newline into a space, multiple newlines to N - 1 newlines.
+ * Presumes `source[offset] === '\n'`
+ */
+function foldNewline(source, offset) {
+ let fold = '';
+ let ch = source[offset + 1];
+ while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
+ if (ch === '\r' && source[offset + 2] !== '\n')
+ break;
+ if (ch === '\n')
+ fold += '\n';
+ offset += 1;
+ ch = source[offset + 1];
+ }
+ if (!fold)
+ fold = ' ';
+ return { fold, offset };
+}
+const escapeCodes = {
+ '0': '\0',
+ a: '\x07',
+ b: '\b',
+ e: '\x1b',
+ f: '\f',
+ n: '\n',
+ r: '\r',
+ t: '\t',
+ v: '\v',
+ N: '\u0085',
+ _: '\u00a0',
+ L: '\u2028',
+ P: '\u2029',
+ ' ': ' ',
+ '"': '"',
+ '/': '/',
+ '\\': '\\',
+ '\t': '\t'
+};
+function parseCharCode(source, offset, length, onError) {
+ const cc = source.substr(offset, length);
+ const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
+ const code = ok ? parseInt(cc, 16) : NaN;
+ if (isNaN(code)) {
+ const raw = source.substr(offset - 2, length + 2);
+ onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ return raw;
+ }
+ return String.fromCodePoint(code);
+}
+
+export { resolveFlowScalar };
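
A short sketch of the quoting rules handled by resolveFlowScalar(), via the public parse() export:

    import { parse } from 'yaml';

    // Double-quoted scalars use the escape table defined above.
    parse('"a\\tb \\u0041"');   // 'a\tb A'

    // Single-quoted scalars only escape the quote itself, by doubling it.
    parse("'it''s'");           // "it's"

    // Line breaks inside flow scalars are folded into spaces.
    parse('"folded\n  text"');  // 'folded text'
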
diff --git a/node_modules/yaml/browser/dist/compose/resolve-props.js b/node_modules/yaml/browser/dist/compose/resolve-props.js
new file mode 100644
index 0000000..3f4a1b3
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/resolve-props.js
@@ -0,0 +1,130 @@
+function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
+ let spaceBefore = false;
+ let atNewline = startOnNewline;
+ let hasSpace = startOnNewline;
+ let comment = '';
+ let commentSep = '';
+ let hasNewline = false;
+ let reqSpace = false;
+ let anchor = null;
+ let tag = null;
+ let comma = null;
+ let found = null;
+ let start = null;
+ for (const token of tokens) {
+ if (reqSpace) {
+ if (token.type !== 'space' &&
+ token.type !== 'newline' &&
+ token.type !== 'comma')
+ onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ reqSpace = false;
+ }
+ switch (token.type) {
+ case 'space':
+ // At the doc level, tabs at line start may be parsed
+ // as leading white space rather than indentation.
+ // In a flow collection, only the parser handles indent.
+ if (!flow &&
+ atNewline &&
+ indicator !== 'doc-start' &&
+ token.source[0] === '\t')
+ onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (!hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = token.source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += commentSep + cb;
+ commentSep = '';
+ atNewline = false;
+ break;
+ }
+ case 'newline':
+ if (atNewline) {
+ if (comment)
+ comment += token.source;
+ else
+ spaceBefore = true;
+ }
+ else
+ commentSep += token.source;
+ atNewline = true;
+ hasNewline = true;
+ hasSpace = true;
+ break;
+ case 'anchor':
+ if (anchor)
+ onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
+ if (token.source.endsWith(':'))
+ onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
+ anchor = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ case 'tag': {
+ if (tag)
+ onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
+ tag = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ }
+ case indicator:
+ // Preceding comments could be handled differently here
+ if (anchor || tag)
+ onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
+ if (found)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow !== null && flow !== void 0 ? flow : 'collection'}`);
+ found = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ case 'comma':
+ if (flow) {
+ if (comma)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
+ comma = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ }
+ // else fallthrough
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
+ atNewline = false;
+ hasSpace = false;
+ }
+ }
+ const last = tokens[tokens.length - 1];
+ const end = last ? last.offset + last.source.length : offset;
+ if (reqSpace &&
+ next &&
+ next.type !== 'space' &&
+ next.type !== 'newline' &&
+ next.type !== 'comma' &&
+ (next.type !== 'scalar' || next.source !== ''))
+ onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ return {
+ comma,
+ found,
+ spaceBefore,
+ comment,
+ hasNewline,
+ anchor,
+ tag,
+ end,
+ start: start !== null && start !== void 0 ? start : end
+ };
+}
+
+export { resolveProps };
diff --git a/node_modules/yaml/browser/dist/compose/util-contains-newline.js b/node_modules/yaml/browser/dist/compose/util-contains-newline.js
new file mode 100644
index 0000000..2d65390
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/util-contains-newline.js
@@ -0,0 +1,34 @@
+function containsNewline(key) {
+ if (!key)
+ return null;
+ switch (key.type) {
+ case 'alias':
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ if (key.source.includes('\n'))
+ return true;
+ if (key.end)
+ for (const st of key.end)
+ if (st.type === 'newline')
+ return true;
+ return false;
+ case 'flow-collection':
+ for (const it of key.items) {
+ for (const st of it.start)
+ if (st.type === 'newline')
+ return true;
+ if (it.sep)
+ for (const st of it.sep)
+ if (st.type === 'newline')
+ return true;
+ if (containsNewline(it.key) || containsNewline(it.value))
+ return true;
+ }
+ return false;
+ default:
+ return true;
+ }
+}
+
+export { containsNewline };
diff --git a/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js b/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js
new file mode 100644
index 0000000..4004719
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js
@@ -0,0 +1,27 @@
+function emptyScalarPosition(offset, before, pos) {
+ if (before) {
+ if (pos === null)
+ pos = before.length;
+ for (let i = pos - 1; i >= 0; --i) {
+ let st = before[i];
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ offset -= st.source.length;
+ continue;
+ }
+ // Technically, an empty scalar is immediately after the last non-empty
+ // node, but it's more useful to place it after any whitespace.
+ st = before[++i];
+ while ((st === null || st === void 0 ? void 0 : st.type) === 'space') {
+ offset += st.source.length;
+ st = before[++i];
+ }
+ break;
+ }
+ }
+ return offset;
+}
+
+export { emptyScalarPosition };
diff --git a/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js b/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js
new file mode 100644
index 0000000..529e1c6
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js
@@ -0,0 +1,15 @@
+import { containsNewline } from './util-contains-newline.js';
+
+function flowIndentCheck(indent, fc, onError) {
+ if ((fc === null || fc === void 0 ? void 0 : fc.type) === 'flow-collection') {
+ const end = fc.end[0];
+ if (end.indent === indent &&
+ (end.source === ']' || end.source === '}') &&
+ containsNewline(fc)) {
+ const msg = 'Flow end indicator should be more indented than parent';
+ onError(end, 'BAD_INDENT', msg, true);
+ }
+ }
+}
+
+export { flowIndentCheck };
diff --git a/node_modules/yaml/browser/dist/compose/util-map-includes.js b/node_modules/yaml/browser/dist/compose/util-map-includes.js
new file mode 100644
index 0000000..de74ddf
--- /dev/null
+++ b/node_modules/yaml/browser/dist/compose/util-map-includes.js
@@ -0,0 +1,17 @@
+import { isScalar } from '../nodes/Node.js';
+
+function mapIncludes(ctx, items, search) {
+ const { uniqueKeys } = ctx.options;
+ if (uniqueKeys === false)
+ return false;
+ const isEqual = typeof uniqueKeys === 'function'
+ ? uniqueKeys
+ : (a, b) => a === b ||
+ (isScalar(a) &&
+ isScalar(b) &&
+ a.value === b.value &&
+ !(a.value === '<<' && ctx.schema.merge));
+ return items.some(pair => isEqual(pair.key, search));
+}
+
+export { mapIncludes };
diff --git a/node_modules/yaml/browser/dist/doc/Document.js b/node_modules/yaml/browser/dist/doc/Document.js
new file mode 100644
index 0000000..c436340
--- /dev/null
+++ b/node_modules/yaml/browser/dist/doc/Document.js
@@ -0,0 +1,332 @@
+import { Alias } from '../nodes/Alias.js';
+import { isEmptyPath, collectionFromPath } from '../nodes/Collection.js';
+import { NODE_TYPE, DOC, isNode, isCollection, isScalar } from '../nodes/Node.js';
+import { Pair } from '../nodes/Pair.js';
+import { toJS } from '../nodes/toJS.js';
+import { Schema } from '../schema/Schema.js';
+import { stringify } from '../stringify/stringify.js';
+import { stringifyDocument } from '../stringify/stringifyDocument.js';
+import { anchorNames, findNewAnchor, createNodeAnchors } from './anchors.js';
+import { applyReviver } from './applyReviver.js';
+import { createNode } from './createNode.js';
+import { Directives } from './directives.js';
+
+class Document {
+ constructor(value, replacer, options) {
+ /** A comment before this Document */
+ this.commentBefore = null;
+ /** A comment immediately after this Document */
+ this.comment = null;
+ /** Errors encountered during parsing. */
+ this.errors = [];
+ /** Warnings encountered during parsing. */
+ this.warnings = [];
+ Object.defineProperty(this, NODE_TYPE, { value: DOC });
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const opt = Object.assign({
+ intAsBigInt: false,
+ keepSourceTokens: false,
+ logLevel: 'warn',
+ prettyErrors: true,
+ strict: true,
+ uniqueKeys: true,
+ version: '1.2'
+ }, options);
+ this.options = opt;
+ let { version } = opt;
+ if (options === null || options === void 0 ? void 0 : options.directives) {
+ this.directives = options.directives.atDocument();
+ if (this.directives.yaml.explicit)
+ version = this.directives.yaml.version;
+ }
+ else
+ this.directives = new Directives({ version });
+ this.setSchema(version, options);
+ if (value === undefined)
+ this.contents = null;
+ else {
+ this.contents = this.createNode(value, _replacer, options);
+ }
+ }
+ /**
+ * Create a deep copy of this Document and its contents.
+ *
+ * Custom Node values that inherit from `Object` still refer to their original instances.
+ */
+ clone() {
+ const copy = Object.create(Document.prototype, {
+ [NODE_TYPE]: { value: DOC }
+ });
+ copy.commentBefore = this.commentBefore;
+ copy.comment = this.comment;
+ copy.errors = this.errors.slice();
+ copy.warnings = this.warnings.slice();
+ copy.options = Object.assign({}, this.options);
+ if (this.directives)
+ copy.directives = this.directives.clone();
+ copy.schema = this.schema.clone();
+ copy.contents = isNode(this.contents)
+ ? this.contents.clone(copy.schema)
+ : this.contents;
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /** Adds a value to the document. */
+ add(value) {
+ if (assertCollection(this.contents))
+ this.contents.add(value);
+ }
+ /** Adds a value to the document. */
+ addIn(path, value) {
+ if (assertCollection(this.contents))
+ this.contents.addIn(path, value);
+ }
+ /**
+ * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
+ *
+ * If `node` already has an anchor, `name` is ignored.
+ * Otherwise, the `node.anchor` value will be set to `name`,
+ * or if an anchor with that name is already present in the document,
+ * `name` will be used as a prefix for a new unique anchor.
+ * If `name` is undefined, the generated anchor will use 'a' as a prefix.
+ */
+ createAlias(node, name) {
+ if (!node.anchor) {
+ const prev = anchorNames(this);
+ node.anchor =
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
+ }
+ return new Alias(node.anchor);
+ }
+ createNode(value, replacer, options) {
+ let _replacer = undefined;
+ if (typeof replacer === 'function') {
+ value = replacer.call({ '': value }, '', value);
+ _replacer = replacer;
+ }
+ else if (Array.isArray(replacer)) {
+ const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
+ const asStr = replacer.filter(keyToStr).map(String);
+ if (asStr.length > 0)
+ replacer = replacer.concat(asStr);
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options !== null && options !== void 0 ? options : {};
+ const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this,
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ anchorPrefix || 'a');
+ const ctx = {
+ aliasDuplicateObjects: aliasDuplicateObjects !== null && aliasDuplicateObjects !== void 0 ? aliasDuplicateObjects : true,
+ keepUndefined: keepUndefined !== null && keepUndefined !== void 0 ? keepUndefined : false,
+ onAnchor,
+ onTagObj,
+ replacer: _replacer,
+ schema: this.schema,
+ sourceObjects
+ };
+ const node = createNode(value, tag, ctx);
+ if (flow && isCollection(node))
+ node.flow = true;
+ setAnchors();
+ return node;
+ }
+ /**
+ * Convert a key and a value into a `Pair` using the current schema,
+ * recursively wrapping all values as `Scalar` or `Collection` nodes.
+ */
+ createPair(key, value, options = {}) {
+ const k = this.createNode(key, null, options);
+ const v = this.createNode(value, null, options);
+ return new Pair(k, v);
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ return assertCollection(this.contents) ? this.contents.delete(key) : false;
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ if (isEmptyPath(path)) {
+ if (this.contents == null)
+ return false;
+ this.contents = null;
+ return true;
+ }
+ return assertCollection(this.contents)
+ ? this.contents.deleteIn(path)
+ : false;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ get(key, keepScalar) {
+ return isCollection(this.contents)
+ ? this.contents.get(key, keepScalar)
+ : undefined;
+ }
+ /**
+ * Returns item at `path`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ if (isEmptyPath(path))
+ return !keepScalar && isScalar(this.contents)
+ ? this.contents.value
+ : this.contents;
+ return isCollection(this.contents)
+ ? this.contents.getIn(path, keepScalar)
+ : undefined;
+ }
+ /**
+ * Checks if the document includes a value with the key `key`.
+ */
+ has(key) {
+ return isCollection(this.contents) ? this.contents.has(key) : false;
+ }
+ /**
+ * Checks if the document includes a value at `path`.
+ */
+ hasIn(path) {
+ if (isEmptyPath(path))
+ return this.contents !== undefined;
+ return isCollection(this.contents) ? this.contents.hasIn(path) : false;
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ set(key, value) {
+ if (this.contents == null) {
+ this.contents = collectionFromPath(this.schema, [key], value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.set(key, value);
+ }
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ if (isEmptyPath(path))
+ this.contents = value;
+ else if (this.contents == null) {
+ this.contents = collectionFromPath(this.schema, Array.from(path), value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.setIn(path, value);
+ }
+ }
+ /**
+ * Change the YAML version and schema used by the document.
+ * A `null` version disables support for directives, explicit tags, anchors, and aliases.
+ * It also requires the `schema` option to be given as a `Schema` instance value.
+ *
+ * Overrides all previously set schema options.
+ */
+ setSchema(version, options = {}) {
+ if (typeof version === 'number')
+ version = String(version);
+ let opt;
+ switch (version) {
+ case '1.1':
+ if (this.directives)
+ this.directives.yaml.version = '1.1';
+ else
+ this.directives = new Directives({ version: '1.1' });
+ opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
+ break;
+ case '1.2':
+ case 'next':
+ if (this.directives)
+ this.directives.yaml.version = version;
+ else
+ this.directives = new Directives({ version });
+ opt = { merge: false, resolveKnownTags: true, schema: 'core' };
+ break;
+ case null:
+ if (this.directives)
+ delete this.directives;
+ opt = null;
+ break;
+ default: {
+ const sv = JSON.stringify(version);
+ throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
+ }
+ }
+ // Not using `instanceof Schema` to allow for duck typing
+ if (options.schema instanceof Object)
+ this.schema = options.schema;
+ else if (opt)
+ this.schema = new Schema(Object.assign(opt, options));
+ else
+ throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
+ }
+ // json & jsonArg are only used from toJSON()
+ toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
+ const ctx = {
+ anchors: new Map(),
+ doc: this,
+ keep: !json,
+ mapAsMap: mapAsMap === true,
+ mapKeyWarned: false,
+ maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
+ stringify
+ };
+ const res = toJS(this.contents, jsonArg !== null && jsonArg !== void 0 ? jsonArg : '', ctx);
+ if (typeof onAnchor === 'function')
+ for (const { count, res } of ctx.anchors.values())
+ onAnchor(res, count);
+ return typeof reviver === 'function'
+ ? applyReviver(reviver, { '': res }, '', res)
+ : res;
+ }
+ /**
+ * A JSON representation of the document `contents`.
+ *
+ * @param jsonArg Used by `JSON.stringify` to indicate the array index or
+ * property name.
+ */
+ toJSON(jsonArg, onAnchor) {
+ return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
+ }
+ /** A YAML representation of the document. */
+ toString(options = {}) {
+ if (this.errors.length > 0)
+ throw new Error('Document with errors cannot be stringified');
+ if ('indent' in options &&
+ (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
+ const s = JSON.stringify(options.indent);
+ throw new Error(`"indent" option must be a positive integer, not ${s}`);
+ }
+ return stringifyDocument(this, options);
+ }
+}
+function assertCollection(contents) {
+ if (isCollection(contents))
+ return true;
+ throw new Error('Expected a YAML collection as document contents');
+}
+
+export { Document };
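
The Document class above is the main entry point for building and editing a document programmatically; a minimal sketch of its accessor API (output shown as a comment):

    import { Document } from 'yaml';

    const doc = new Document({ a: 1 });
    doc.set('b', 2);               // top-level key
    doc.setIn(['c', 'd'], 3);      // nested path, creating the intermediate map
    doc.get('a');                  // 1
    String(doc);                   // 'a: 1\nb: 2\nc:\n  d: 3\n'
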
diff --git a/node_modules/yaml/browser/dist/doc/anchors.js b/node_modules/yaml/browser/dist/doc/anchors.js
new file mode 100644
index 0000000..797e8a7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/doc/anchors.js
@@ -0,0 +1,72 @@
+import { isScalar, isCollection } from '../nodes/Node.js';
+import { visit } from '../visit.js';
+
+/**
+ * Verify that the input string is a valid anchor.
+ *
+ * Will throw on errors.
+ */
+function anchorIsValid(anchor) {
+ if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
+ const sa = JSON.stringify(anchor);
+ const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
+ throw new Error(msg);
+ }
+ return true;
+}
+function anchorNames(root) {
+ const anchors = new Set();
+ visit(root, {
+ Value(_key, node) {
+ if (node.anchor)
+ anchors.add(node.anchor);
+ }
+ });
+ return anchors;
+}
+/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
+function findNewAnchor(prefix, exclude) {
+ for (let i = 1; true; ++i) {
+ const name = `${prefix}${i}`;
+ if (!exclude.has(name))
+ return name;
+ }
+}
+function createNodeAnchors(doc, prefix) {
+ const aliasObjects = [];
+ const sourceObjects = new Map();
+ let prevAnchors = null;
+ return {
+ onAnchor: (source) => {
+ aliasObjects.push(source);
+ if (!prevAnchors)
+ prevAnchors = anchorNames(doc);
+ const anchor = findNewAnchor(prefix, prevAnchors);
+ prevAnchors.add(anchor);
+ return anchor;
+ },
+ /**
+ * With circular references, the source node is only resolved after all
+ * of its child nodes are. This is why anchors are set only after all of
+ * the nodes have been created.
+ */
+ setAnchors: () => {
+ for (const source of aliasObjects) {
+ const ref = sourceObjects.get(source);
+ if (typeof ref === 'object' &&
+ ref.anchor &&
+ (isScalar(ref.node) || isCollection(ref.node))) {
+ ref.node.anchor = ref.anchor;
+ }
+ else {
+ const error = new Error('Failed to resolve repeated object (this should not happen)');
+ error.source = source;
+ throw error;
+ }
+ }
+ },
+ sourceObjects
+ };
+}
+
+export { anchorIsValid, anchorNames, createNodeAnchors, findNewAnchor };
diff --git a/node_modules/yaml/browser/dist/doc/applyReviver.js b/node_modules/yaml/browser/dist/doc/applyReviver.js
new file mode 100644
index 0000000..0e6a93c
--- /dev/null
+++ b/node_modules/yaml/browser/dist/doc/applyReviver.js
@@ -0,0 +1,54 @@
+/**
+ * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
+ * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
+ * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
+ *
+ * Includes extensions for handling Map and Set objects.
+ */
+function applyReviver(reviver, obj, key, val) {
+ if (val && typeof val === 'object') {
+ if (Array.isArray(val)) {
+ for (let i = 0, len = val.length; i < len; ++i) {
+ const v0 = val[i];
+ const v1 = applyReviver(reviver, val, String(i), v0);
+ if (v1 === undefined)
+ delete val[i];
+ else if (v1 !== v0)
+ val[i] = v1;
+ }
+ }
+ else if (val instanceof Map) {
+ for (const k of Array.from(val.keys())) {
+ const v0 = val.get(k);
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ val.delete(k);
+ else if (v1 !== v0)
+ val.set(k, v1);
+ }
+ }
+ else if (val instanceof Set) {
+ for (const v0 of Array.from(val)) {
+ const v1 = applyReviver(reviver, val, v0, v0);
+ if (v1 === undefined)
+ val.delete(v0);
+ else if (v1 !== v0) {
+ val.delete(v0);
+ val.add(v1);
+ }
+ }
+ }
+ else {
+ for (const [k, v0] of Object.entries(val)) {
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ delete val[k];
+ else if (v1 !== v0)
+ val[k] = v1;
+ }
+ }
+ }
+ return reviver.call(obj, key, val);
+}
+
+export { applyReviver };
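
applyReviver() backs the optional reviver argument of the public parse() export, mirroring the second argument of JSON.parse(); a quick sketch:

    import { parse } from 'yaml';

    // Each value is visited bottom-up and may be replaced or dropped (by returning undefined).
    parse('a: 1\nb: 2\n', (key, value) =>
      typeof value === 'number' ? value * 10 : value
    );  // { a: 10, b: 20 }
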
diff --git a/node_modules/yaml/browser/dist/doc/createNode.js b/node_modules/yaml/browser/dist/doc/createNode.js
new file mode 100644
index 0000000..8ad907d
--- /dev/null
+++ b/node_modules/yaml/browser/dist/doc/createNode.js
@@ -0,0 +1,87 @@
+import { Alias } from '../nodes/Alias.js';
+import { isNode, isPair, MAP, SEQ, isDocument } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+
+const defaultTagPrefix = 'tag:yaml.org,2002:';
+function findTagObject(value, tagName, tags) {
+ var _a;
+ if (tagName) {
+ const match = tags.filter(t => t.tag === tagName);
+ const tagObj = (_a = match.find(t => !t.format)) !== null && _a !== void 0 ? _a : match[0];
+ if (!tagObj)
+ throw new Error(`Tag ${tagName} not found`);
+ return tagObj;
+ }
+ return tags.find(t => { var _a; return ((_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, value)) && !t.format; });
+}
+function createNode(value, tagName, ctx) {
+ var _a, _b;
+ if (isDocument(value))
+ value = value.contents;
+ if (isNode(value))
+ return value;
+ if (isPair(value)) {
+ const map = (_b = (_a = ctx.schema[MAP]).createNode) === null || _b === void 0 ? void 0 : _b.call(_a, ctx.schema, null, ctx);
+ map.items.push(value);
+ return map;
+ }
+ if (value instanceof String ||
+ value instanceof Number ||
+ value instanceof Boolean ||
+ (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
+ ) {
+ // https://tc39.es/ecma262/#sec-serializejsonproperty
+ value = value.valueOf();
+ }
+ const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
+ // Detect duplicate references to the same object & use Alias nodes for all
+ // after first. The `ref` wrapper allows for circular references to resolve.
+ let ref = undefined;
+ if (aliasDuplicateObjects && value && typeof value === 'object') {
+ ref = sourceObjects.get(value);
+ if (ref) {
+ if (!ref.anchor)
+ ref.anchor = onAnchor(value);
+ return new Alias(ref.anchor);
+ }
+ else {
+ ref = { anchor: null, node: null };
+ sourceObjects.set(value, ref);
+ }
+ }
+ if (tagName === null || tagName === void 0 ? void 0 : tagName.startsWith('!!'))
+ tagName = defaultTagPrefix + tagName.slice(2);
+ let tagObj = findTagObject(value, tagName, schema.tags);
+ if (!tagObj) {
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ value = value.toJSON();
+ }
+ if (!value || typeof value !== 'object') {
+ const node = new Scalar(value);
+ if (ref)
+ ref.node = node;
+ return node;
+ }
+ tagObj =
+ value instanceof Map
+ ? schema[MAP]
+ : Symbol.iterator in Object(value)
+ ? schema[SEQ]
+ : schema[MAP];
+ }
+ if (onTagObj) {
+ onTagObj(tagObj);
+ delete ctx.onTagObj;
+ }
+ const node = (tagObj === null || tagObj === void 0 ? void 0 : tagObj.createNode)
+ ? tagObj.createNode(ctx.schema, value, ctx)
+ : new Scalar(value);
+ if (tagName)
+ node.tag = tagName;
+ if (ref)
+ ref.node = node;
+ return node;
+}
+
+export { createNode };
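
createNode() is also exposed as Document#createNode; with aliasDuplicateObjects left at its default, a repeated object is anchored once and turned into an Alias node on reuse. A small sketch (expected output as a comment):

    import { Document } from 'yaml';

    const doc = new Document();
    const shared = { hello: 'world' };
    doc.contents = doc.createNode({ a: shared, b: shared });
    // The repeated object gets one anchor (default prefix 'a') and an alias:
    String(doc);  // 'a: &a1\n  hello: world\nb: *a1\n'
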
diff --git a/node_modules/yaml/browser/dist/doc/directives.js b/node_modules/yaml/browser/dist/doc/directives.js
new file mode 100644
index 0000000..bdbc153
--- /dev/null
+++ b/node_modules/yaml/browser/dist/doc/directives.js
@@ -0,0 +1,169 @@
+import { isNode } from '../nodes/Node.js';
+import { visit } from '../visit.js';
+
+const escapeChars = {
+ '!': '%21',
+ ',': '%2C',
+ '[': '%5B',
+ ']': '%5D',
+ '{': '%7B',
+ '}': '%7D'
+};
+const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
+class Directives {
+ constructor(yaml, tags) {
+ /**
+ * The directives-end/doc-start marker `---`. If `null`, a marker may still be
+ * included in the document's stringified representation.
+ */
+ this.docStart = null;
+ /** The doc-end marker `...`. */
+ this.docEnd = false;
+ this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
+ this.tags = Object.assign({}, Directives.defaultTags, tags);
+ }
+ clone() {
+ const copy = new Directives(this.yaml, this.tags);
+ copy.docStart = this.docStart;
+ return copy;
+ }
+ /**
+ * During parsing, get a Directives instance for the current document and
+ * update the stream state according to the current version's spec.
+ */
+ atDocument() {
+ const res = new Directives(this.yaml, this.tags);
+ switch (this.yaml.version) {
+ case '1.1':
+ this.atNextDocument = true;
+ break;
+ case '1.2':
+ this.atNextDocument = false;
+ this.yaml = {
+ explicit: Directives.defaultYaml.explicit,
+ version: '1.2'
+ };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ break;
+ }
+ return res;
+ }
+ /**
+ * @param onError - May be called even if the action was successful
+ * @returns `true` on success
+ */
+ add(line, onError) {
+ if (this.atNextDocument) {
+ this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ this.atNextDocument = false;
+ }
+ const parts = line.trim().split(/[ \t]+/);
+ const name = parts.shift();
+ switch (name) {
+ case '%TAG': {
+ if (parts.length !== 2) {
+ onError(0, '%TAG directive should contain exactly two parts');
+ if (parts.length < 2)
+ return false;
+ }
+ const [handle, prefix] = parts;
+ this.tags[handle] = prefix;
+ return true;
+ }
+ case '%YAML': {
+ this.yaml.explicit = true;
+ if (parts.length !== 1) {
+ onError(0, '%YAML directive should contain exactly one part');
+ return false;
+ }
+ const [version] = parts;
+ if (version === '1.1' || version === '1.2') {
+ this.yaml.version = version;
+ return true;
+ }
+ else {
+ const isValid = /^\d+\.\d+$/.test(version);
+ onError(6, `Unsupported YAML version ${version}`, isValid);
+ return false;
+ }
+ }
+ default:
+ onError(0, `Unknown directive ${name}`, true);
+ return false;
+ }
+ }
+ /**
+ * Resolves a tag, matching handles to those defined in %TAG directives.
+ *
+ * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
+ * `'!local'` tag, or `null` if unresolvable.
+ */
+ tagName(source, onError) {
+ if (source === '!')
+ return '!'; // non-specific tag
+ if (source[0] !== '!') {
+ onError(`Not a valid tag: ${source}`);
+ return null;
+ }
+ if (source[1] === '<') {
+ const verbatim = source.slice(2, -1);
+ if (verbatim === '!' || verbatim === '!!') {
+ onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
+ return null;
+ }
+ if (source[source.length - 1] !== '>')
+ onError('Verbatim tags must end with a >');
+ return verbatim;
+ }
+ const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
+ if (!suffix)
+ onError(`The ${source} tag has no suffix`);
+ const prefix = this.tags[handle];
+ if (prefix)
+ return prefix + decodeURIComponent(suffix);
+ if (handle === '!')
+ return source; // local tag
+ onError(`Could not resolve tag: ${source}`);
+ return null;
+ }
+ /**
+ * Given a fully resolved tag, returns its printable string form,
+ * taking into account current tag prefixes and defaults.
+ */
+ tagString(tag) {
+ for (const [handle, prefix] of Object.entries(this.tags)) {
+ if (tag.startsWith(prefix))
+ return handle + escapeTagName(tag.substring(prefix.length));
+ }
+ return tag[0] === '!' ? tag : `!<${tag}>`;
+ }
+ toString(doc) {
+ const lines = this.yaml.explicit
+ ? [`%YAML ${this.yaml.version || '1.2'}`]
+ : [];
+ const tagEntries = Object.entries(this.tags);
+ let tagNames;
+ if (doc && tagEntries.length > 0 && isNode(doc.contents)) {
+ const tags = {};
+ visit(doc.contents, (_key, node) => {
+ if (isNode(node) && node.tag)
+ tags[node.tag] = true;
+ });
+ tagNames = Object.keys(tags);
+ }
+ else
+ tagNames = [];
+ for (const [handle, prefix] of tagEntries) {
+ if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
+ continue;
+ if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
+ lines.push(`%TAG ${handle} ${prefix}`);
+ }
+ return lines.join('\n');
+ }
+}
+Directives.defaultYaml = { explicit: false, version: '1.2' };
+Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
+
+export { Directives };
diff --git a/node_modules/yaml/browser/dist/errors.js b/node_modules/yaml/browser/dist/errors.js
new file mode 100644
index 0000000..2fac9b7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/errors.js
@@ -0,0 +1,57 @@
+class YAMLError extends Error {
+ constructor(name, pos, code, message) {
+ super();
+ this.name = name;
+ this.code = code;
+ this.message = message;
+ this.pos = pos;
+ }
+}
+class YAMLParseError extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLParseError', pos, code, message);
+ }
+}
+class YAMLWarning extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLWarning', pos, code, message);
+ }
+}
+const prettifyError = (src, lc) => (error) => {
+ if (error.pos[0] === -1)
+ return;
+ error.linePos = error.pos.map(pos => lc.linePos(pos));
+ const { line, col } = error.linePos[0];
+ error.message += ` at line ${line}, column ${col}`;
+ let ci = col - 1;
+ let lineStr = src
+ .substring(lc.lineStarts[line - 1], lc.lineStarts[line])
+ .replace(/[\n\r]+$/, '');
+ // Trim to max 80 chars, keeping col position near the middle
+ if (ci >= 60 && lineStr.length > 80) {
+ const trimStart = Math.min(ci - 39, lineStr.length - 79);
+ lineStr = '…' + lineStr.substring(trimStart);
+ ci -= trimStart - 1;
+ }
+ if (lineStr.length > 80)
+ lineStr = lineStr.substring(0, 79) + '…';
+ // Include previous line in context if pointing at line start
+ if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
+ // Regexp won't match if start is trimmed
+ let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
+ if (prev.length > 80)
+ prev = prev.substring(0, 79) + '…\n';
+ lineStr = prev + lineStr;
+ }
+ if (/[^ ]/.test(lineStr)) {
+ let count = 1;
+ const end = error.linePos[1];
+ if (end && end.line === line && end.col > col) {
+ count = Math.min(end.col - col, 80 - ci);
+ }
+ const pointer = ' '.repeat(ci) + '^'.repeat(count);
+ error.message += `:\n\n${lineStr}\n${pointer}\n`;
+ }
+};
+
+export { YAMLError, YAMLParseError, YAMLWarning, prettifyError };
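
Parse errors are collected on the document rather than thrown; with the default prettyErrors option, prettifyError() appends the line/column and a source excerpt to the message. A brief sketch using an unterminated double-quoted scalar:

    import { parseDocument, YAMLParseError } from 'yaml';

    const doc = parseDocument('key: "no closing quote');
    doc.errors[0] instanceof YAMLParseError;          // true
    doc.errors.some(e => e.code === 'MISSING_CHAR');  // true (missing closing quote)
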
diff --git a/node_modules/yaml/browser/dist/index.js b/node_modules/yaml/browser/dist/index.js
new file mode 100644
index 0000000..96765af
--- /dev/null
+++ b/node_modules/yaml/browser/dist/index.js
@@ -0,0 +1,17 @@
+export { Composer } from './compose/composer.js';
+export { Document } from './doc/Document.js';
+export { Schema } from './schema/Schema.js';
+export { YAMLError, YAMLParseError, YAMLWarning } from './errors.js';
+export { Alias } from './nodes/Alias.js';
+export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/Node.js';
+export { Pair } from './nodes/Pair.js';
+export { Scalar } from './nodes/Scalar.js';
+export { YAMLMap } from './nodes/YAMLMap.js';
+export { YAMLSeq } from './nodes/YAMLSeq.js';
+import * as cst from './parse/cst.js';
+export { cst as CST };
+export { Lexer } from './parse/lexer.js';
+export { LineCounter } from './parse/line-counter.js';
+export { Parser } from './parse/parser.js';
+export { parse, parseAllDocuments, parseDocument, stringify } from './public-api.js';
+export { visit, visitAsync } from './visit.js';
diff --git a/node_modules/yaml/browser/dist/log.js b/node_modules/yaml/browser/dist/log.js
new file mode 100644
index 0000000..aa5ba56
--- /dev/null
+++ b/node_modules/yaml/browser/dist/log.js
@@ -0,0 +1,14 @@
+function debug(logLevel, ...messages) {
+ if (logLevel === 'debug')
+ console.log(...messages);
+}
+function warn(logLevel, warning) {
+ if (logLevel === 'debug' || logLevel === 'warn') {
+ if (typeof process !== 'undefined' && process.emitWarning)
+ process.emitWarning(warning);
+ else
+ console.warn(warning);
+ }
+}
+
+export { debug, warn };
diff --git a/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js b/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js
new file mode 100644
index 0000000..4964191
--- /dev/null
+++ b/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js
@@ -0,0 +1,164 @@
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+
+/* global Reflect, Promise */
+var extendStatics = function (d, b) {
+ extendStatics = Object.setPrototypeOf || {
+ __proto__: []
+ } instanceof Array && function (d, b) {
+ d.__proto__ = b;
+ } || function (d, b) {
+ for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
+ };
+
+ return extendStatics(d, b);
+};
+
+function __extends(d, b) {
+ if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+ extendStatics(d, b);
+
+ function __() {
+ this.constructor = d;
+ }
+
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+}
+function __generator(thisArg, body) {
+ var _ = {
+ label: 0,
+ sent: function () {
+ if (t[0] & 1) throw t[1];
+ return t[1];
+ },
+ trys: [],
+ ops: []
+ },
+ f,
+ y,
+ t,
+ g;
+ return g = {
+ next: verb(0),
+ "throw": verb(1),
+ "return": verb(2)
+ }, typeof Symbol === "function" && (g[Symbol.iterator] = function () {
+ return this;
+ }), g;
+
+ function verb(n) {
+ return function (v) {
+ return step([n, v]);
+ };
+ }
+
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+
+ switch (op[0]) {
+ case 0:
+ case 1:
+ t = op;
+ break;
+
+ case 4:
+ _.label++;
+ return {
+ value: op[1],
+ done: false
+ };
+
+ case 5:
+ _.label++;
+ y = op[1];
+ op = [0];
+ continue;
+
+ case 7:
+ op = _.ops.pop();
+
+ _.trys.pop();
+
+ continue;
+
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
+ _ = 0;
+ continue;
+ }
+
+ if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
+ _.label = op[1];
+ break;
+ }
+
+ if (op[0] === 6 && _.label < t[1]) {
+ _.label = t[1];
+ t = op;
+ break;
+ }
+
+ if (t && _.label < t[2]) {
+ _.label = t[2];
+
+ _.ops.push(op);
+
+ break;
+ }
+
+ if (t[2]) _.ops.pop();
+
+ _.trys.pop();
+
+ continue;
+ }
+
+ op = body.call(thisArg, _);
+ } catch (e) {
+ op = [6, e];
+ y = 0;
+ } finally {
+ f = t = 0;
+ }
+
+ if (op[0] & 5) throw op[1];
+ return {
+ value: op[0] ? op[1] : void 0,
+ done: true
+ };
+ }
+}
+function __values(o) {
+ var s = typeof Symbol === "function" && Symbol.iterator,
+ m = s && o[s],
+ i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return {
+ value: o && o[i++],
+ done: !o
+ };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+}
+
+export { __extends, __generator, __values };
diff --git a/node_modules/yaml/browser/dist/nodes/Alias.js b/node_modules/yaml/browser/dist/nodes/Alias.js
new file mode 100644
index 0000000..141b811
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/Alias.js
@@ -0,0 +1,94 @@
+import { anchorIsValid } from '../doc/anchors.js';
+import { visit } from '../visit.js';
+import { NodeBase, ALIAS, isAlias, isCollection, isPair } from './Node.js';
+
+class Alias extends NodeBase {
+ constructor(source) {
+ super(ALIAS);
+ this.source = source;
+ Object.defineProperty(this, 'tag', {
+ set() {
+ throw new Error('Alias nodes cannot have tags');
+ }
+ });
+ }
+ /**
+ * Resolve the value of this alias within `doc`, finding the last
+ * instance of the `source` anchor before this node.
+ */
+ resolve(doc) {
+ let found = undefined;
+ visit(doc, {
+ Node: (_key, node) => {
+ if (node === this)
+ return visit.BREAK;
+ if (node.anchor === this.source)
+ found = node;
+ }
+ });
+ return found;
+ }
+ toJSON(_arg, ctx) {
+ if (!ctx)
+ return { source: this.source };
+ const { anchors, doc, maxAliasCount } = ctx;
+ const source = this.resolve(doc);
+ if (!source) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new ReferenceError(msg);
+ }
+ const data = anchors.get(source);
+ /* istanbul ignore if */
+ if (!data || data.res === undefined) {
+ const msg = 'This should not happen: Alias anchor was not resolved?';
+ throw new ReferenceError(msg);
+ }
+ if (maxAliasCount >= 0) {
+ data.count += 1;
+ if (data.aliasCount === 0)
+ data.aliasCount = getAliasCount(doc, source, anchors);
+ if (data.count * data.aliasCount > maxAliasCount) {
+ const msg = 'Excessive alias count indicates a resource exhaustion attack';
+ throw new ReferenceError(msg);
+ }
+ }
+ return data.res;
+ }
+ toString(ctx, _onComment, _onChompKeep) {
+ const src = `*${this.source}`;
+ if (ctx) {
+ anchorIsValid(this.source);
+ if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new Error(msg);
+ }
+ if (ctx.implicitKey)
+ return `${src} `;
+ }
+ return src;
+ }
+}
+function getAliasCount(doc, node, anchors) {
+ if (isAlias(node)) {
+ const source = node.resolve(doc);
+ const anchor = anchors && source && anchors.get(source);
+ return anchor ? anchor.count * anchor.aliasCount : 0;
+ }
+ else if (isCollection(node)) {
+ let count = 0;
+ for (const item of node.items) {
+ const c = getAliasCount(doc, item, anchors);
+ if (c > count)
+ count = c;
+ }
+ return count;
+ }
+ else if (isPair(node)) {
+ const kc = getAliasCount(doc, node.key, anchors);
+ const vc = getAliasCount(doc, node.value, anchors);
+ return Math.max(kc, vc);
+ }
+ return 1;
+}
+
+export { Alias };
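A short usage sketch of the alias behavior implemented above, assuming the `yaml` package's public `parseDocument` entry point and its documented `toJS({ maxAliasCount })` option rather than this internal module path:

```js
import { parseDocument } from 'yaml';

// An alias resolves to the last matching anchor set before it.
const doc = parseDocument('base: &b { x: 1 }\ncopy: *b\n');
console.log(doc.toJS()); // { base: { x: 1 }, copy: { x: 1 } }

// Alias#toJSON above guards against alias amplification; with a very
// low maxAliasCount the expansion should throw a ReferenceError.
doc.toJS({ maxAliasCount: 0 });
```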
diff --git a/node_modules/yaml/browser/dist/nodes/Collection.js b/node_modules/yaml/browser/dist/nodes/Collection.js
new file mode 100644
index 0000000..963e610
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/Collection.js
@@ -0,0 +1,146 @@
+import { createNode } from '../doc/createNode.js';
+import { NodeBase, isNode, isPair, isCollection, isScalar } from './Node.js';
+
+function collectionFromPath(schema, path, value) {
+ let v = value;
+ for (let i = path.length - 1; i >= 0; --i) {
+ const k = path[i];
+ if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
+ const a = [];
+ a[k] = v;
+ v = a;
+ }
+ else {
+ v = new Map([[k, v]]);
+ }
+ }
+ return createNode(v, undefined, {
+ aliasDuplicateObjects: false,
+ keepUndefined: false,
+ onAnchor: () => {
+ throw new Error('This should not happen, please report a bug.');
+ },
+ schema,
+ sourceObjects: new Map()
+ });
+}
+// null, undefined, or an empty non-string iterable (e.g. [])
+const isEmptyPath = (path) => path == null ||
+ (typeof path === 'object' && !!path[Symbol.iterator]().next().done);
+class Collection extends NodeBase {
+ constructor(type, schema) {
+ super(type);
+ Object.defineProperty(this, 'schema', {
+ value: schema,
+ configurable: true,
+ enumerable: false,
+ writable: true
+ });
+ }
+ /**
+ * Create a copy of this collection.
+ *
+ * @param schema - If defined, overwrites the original's schema
+ */
+ clone(schema) {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (schema)
+ copy.schema = schema;
+ copy.items = copy.items.map(it => isNode(it) || isPair(it) ? it.clone(schema) : it);
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /**
+ * Adds a value to the collection. For `!!map` and `!!omap` the value must
+ * be a Pair instance or a `{ key, value }` object, which may not have a key
+ * that already exists in the map.
+ */
+ addIn(path, value) {
+ if (isEmptyPath(path))
+ this.add(value);
+ else {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (isCollection(node))
+ node.addIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.delete(key);
+ const node = this.get(key, true);
+ if (isCollection(node))
+ return node.deleteIn(rest);
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (rest.length === 0)
+ return !keepScalar && isScalar(node) ? node.value : node;
+ else
+ return isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
+ }
+ hasAllNullValues(allowScalar) {
+ return this.items.every(node => {
+ if (!isPair(node))
+ return false;
+ const n = node.value;
+ return (n == null ||
+ (allowScalar &&
+ isScalar(n) &&
+ n.value == null &&
+ !n.commentBefore &&
+ !n.comment &&
+ !n.tag));
+ });
+ }
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ */
+ hasIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.has(key);
+ const node = this.get(key, true);
+ return isCollection(node) ? node.hasIn(rest) : false;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ const [key, ...rest] = path;
+ if (rest.length === 0) {
+ this.set(key, value);
+ }
+ else {
+ const node = this.get(key, true);
+ if (isCollection(node))
+ node.setIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+}
+Collection.maxFlowStringSingleLineLength = 60;
+
+export { Collection, collectionFromPath, isEmptyPath };
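A sketch of the path-based accessors defined above, assuming the public `Document` API (`getIn`, `setIn`, `hasIn`, `deleteIn`), which delegates to these `Collection` methods:

```js
import { parseDocument } from 'yaml';

const doc = parseDocument('a:\n  b: [1, 2]\n');
doc.getIn(['a', 'b', 1]);      // 2
doc.setIn(['a', 'c', 'd'], 3); // missing collections are created on the way
doc.hasIn(['a', 'c']);         // true
doc.deleteIn(['a', 'b', 0]);   // true
console.log(String(doc));      // updated YAML text
```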
diff --git a/node_modules/yaml/browser/dist/nodes/Node.js b/node_modules/yaml/browser/dist/nodes/Node.js
new file mode 100644
index 0000000..718f363
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/Node.js
@@ -0,0 +1,48 @@
+const ALIAS = Symbol.for('yaml.alias');
+const DOC = Symbol.for('yaml.document');
+const MAP = Symbol.for('yaml.map');
+const PAIR = Symbol.for('yaml.pair');
+const SCALAR = Symbol.for('yaml.scalar');
+const SEQ = Symbol.for('yaml.seq');
+const NODE_TYPE = Symbol.for('yaml.node.type');
+const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
+const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
+const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
+const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
+const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;
+const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
+function isCollection(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case MAP:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+function isNode(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case ALIAS:
+ case MAP:
+ case SCALAR:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;
+class NodeBase {
+ constructor(type) {
+ Object.defineProperty(this, NODE_TYPE, { value: type });
+ }
+ /** Create a copy of this node. */
+ clone() {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+}
+
+export { ALIAS, DOC, MAP, NODE_TYPE, NodeBase, PAIR, SCALAR, SEQ, hasAnchor, isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq };
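The `NODE_TYPE` symbol checks above back the type guards that the package also re-exports from its root; a quick sketch, assuming those public re-exports:

```js
import { parseDocument, isMap, isSeq, isScalar } from 'yaml';

const doc = parseDocument('list: [1, 2]\n');
isMap(doc.contents);                    // true
isSeq(doc.get('list', true));           // true
isScalar(doc.getIn(['list', 0], true)); // true (keepScalar keeps the node)
```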
diff --git a/node_modules/yaml/browser/dist/nodes/Pair.js b/node_modules/yaml/browser/dist/nodes/Pair.js
new file mode 100644
index 0000000..3fe0fa3
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/Pair.js
@@ -0,0 +1,36 @@
+import { createNode } from '../doc/createNode.js';
+import { stringifyPair } from '../stringify/stringifyPair.js';
+import { addPairToJSMap } from './addPairToJSMap.js';
+import { NODE_TYPE, PAIR, isNode } from './Node.js';
+
+function createPair(key, value, ctx) {
+ const k = createNode(key, undefined, ctx);
+ const v = createNode(value, undefined, ctx);
+ return new Pair(k, v);
+}
+class Pair {
+ constructor(key, value = null) {
+ Object.defineProperty(this, NODE_TYPE, { value: PAIR });
+ this.key = key;
+ this.value = value;
+ }
+ clone(schema) {
+ let { key, value } = this;
+ if (isNode(key))
+ key = key.clone(schema);
+ if (isNode(value))
+ value = value.clone(schema);
+ return new Pair(key, value);
+ }
+ toJSON(_, ctx) {
+ const pair = (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ return addPairToJSMap(ctx, pair, this);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.doc)
+ ? stringifyPair(this, ctx, onComment, onChompKeep)
+ : JSON.stringify(this);
+ }
+}
+
+export { Pair, createPair };
diff --git a/node_modules/yaml/browser/dist/nodes/Scalar.js b/node_modules/yaml/browser/dist/nodes/Scalar.js
new file mode 100644
index 0000000..e7f22a7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/Scalar.js
@@ -0,0 +1,23 @@
+import { NodeBase, SCALAR } from './Node.js';
+import { toJS } from './toJS.js';
+
+const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
+class Scalar extends NodeBase {
+ constructor(value) {
+ super(SCALAR);
+ this.value = value;
+ }
+ toJSON(arg, ctx) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.keep) ? this.value : toJS(this.value, arg, ctx);
+ }
+ toString() {
+ return String(this.value);
+ }
+}
+Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
+Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
+Scalar.PLAIN = 'PLAIN';
+Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
+Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
+
+export { Scalar, isScalarValue };
diff --git a/node_modules/yaml/browser/dist/nodes/YAMLMap.js b/node_modules/yaml/browser/dist/nodes/YAMLMap.js
new file mode 100644
index 0000000..a25e480
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/YAMLMap.js
@@ -0,0 +1,117 @@
+import { stringifyCollection } from '../stringify/stringifyCollection.js';
+import { addPairToJSMap } from './addPairToJSMap.js';
+import { Collection } from './Collection.js';
+import { isPair, isScalar, MAP } from './Node.js';
+import { Pair } from './Pair.js';
+import { isScalarValue } from './Scalar.js';
+
+function findPair(items, key) {
+ const k = isScalar(key) ? key.value : key;
+ for (const it of items) {
+ if (isPair(it)) {
+ if (it.key === key || it.key === k)
+ return it;
+ if (isScalar(it.key) && it.key.value === k)
+ return it;
+ }
+ }
+ return undefined;
+}
+class YAMLMap extends Collection {
+ constructor(schema) {
+ super(MAP, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:map';
+ }
+ /**
+ * Adds a value to the collection.
+ *
+ * @param overwrite - If not set `true`, using a key that is already in the
+ * collection will throw. Otherwise, overwrites the previous value.
+ */
+ add(pair, overwrite) {
+ var _a;
+ let _pair;
+ if (isPair(pair))
+ _pair = pair;
+ else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
+ // In TypeScript, this never happens.
+ _pair = new Pair(pair, pair.value);
+ }
+ else
+ _pair = new Pair(pair.key, pair.value);
+ const prev = findPair(this.items, _pair.key);
+ const sortEntries = (_a = this.schema) === null || _a === void 0 ? void 0 : _a.sortMapEntries;
+ if (prev) {
+ if (!overwrite)
+ throw new Error(`Key ${_pair.key} already set`);
+ // For scalars, keep the old node & its comments and anchors
+ if (isScalar(prev.value) && isScalarValue(_pair.value))
+ prev.value.value = _pair.value;
+ else
+ prev.value = _pair.value;
+ }
+ else if (sortEntries) {
+ const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
+ if (i === -1)
+ this.items.push(_pair);
+ else
+ this.items.splice(i, 0, _pair);
+ }
+ else {
+ this.items.push(_pair);
+ }
+ }
+ delete(key) {
+ const it = findPair(this.items, key);
+ if (!it)
+ return false;
+ const del = this.items.splice(this.items.indexOf(it), 1);
+ return del.length > 0;
+ }
+ get(key, keepScalar) {
+ const it = findPair(this.items, key);
+ const node = it === null || it === void 0 ? void 0 : it.value;
+ return !keepScalar && isScalar(node) ? node.value : node;
+ }
+ has(key) {
+ return !!findPair(this.items, key);
+ }
+ set(key, value) {
+ this.add(new Pair(key, value), true);
+ }
+ /**
+ * @param ctx - Conversion context, originally set in Document#toJS()
+ * @param {Class} Type - If set, forces the returned collection type
+ * @returns Instance of Type, Map, or Object
+ */
+ toJSON(_, ctx, Type) {
+ const map = Type ? new Type() : (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const item of this.items)
+ addPairToJSMap(ctx, map, item);
+ return map;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ for (const item of this.items) {
+ if (!isPair(item))
+ throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
+ }
+ if (!ctx.allNullValues && this.hasAllNullValues(false))
+ ctx = Object.assign({}, ctx, { allNullValues: true });
+ return stringifyCollection(this, ctx, {
+ blockItemPrefix: '',
+ flowChars: { start: '{', end: '}' },
+ itemIndent: ctx.indent || '',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+
+export { YAMLMap, findPair };
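A sketch of the `add`/`set` overwrite semantics described above, assuming `YAMLMap` and `Pair` are used via the package's public exports:

```js
import { YAMLMap, Pair } from 'yaml';

const map = new YAMLMap();
map.set('a', 1);                 // set() is add() with overwrite = true
map.add(new Pair('b', 2));       // would throw if 'b' were already present
map.add(new Pair('a', 3), true); // overwrite replaces the previous value
map.get('a');                    // 3
```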
diff --git a/node_modules/yaml/browser/dist/nodes/YAMLSeq.js b/node_modules/yaml/browser/dist/nodes/YAMLSeq.js
new file mode 100644
index 0000000..2fe3e3a
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/YAMLSeq.js
@@ -0,0 +1,105 @@
+import { stringifyCollection } from '../stringify/stringifyCollection.js';
+import { Collection } from './Collection.js';
+import { SEQ, isScalar } from './Node.js';
+import { isScalarValue } from './Scalar.js';
+import { toJS } from './toJS.js';
+
+class YAMLSeq extends Collection {
+ constructor(schema) {
+ super(SEQ, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:seq';
+ }
+ add(value) {
+ this.items.push(value);
+ }
+ /**
+ * Removes a value from the collection.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ *
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return false;
+ const del = this.items.splice(idx, 1);
+ return del.length > 0;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ get(key, keepScalar) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return undefined;
+ const it = this.items[idx];
+ return !keepScalar && isScalar(it) ? it.value : it;
+ }
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ has(key) {
+ const idx = asItemIndex(key);
+ return typeof idx === 'number' && idx < this.items.length;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ *
+ * If `key` does not contain a representation of an integer, this will throw.
+ * It may be wrapped in a `Scalar`.
+ */
+ set(key, value) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ throw new Error(`Expected a valid index, not ${key}.`);
+ const prev = this.items[idx];
+ if (isScalar(prev) && isScalarValue(value))
+ prev.value = value;
+ else
+ this.items[idx] = value;
+ }
+ toJSON(_, ctx) {
+ const seq = [];
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(seq);
+ let i = 0;
+ for (const item of this.items)
+ seq.push(toJS(item, String(i++), ctx));
+ return seq;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ return stringifyCollection(this, ctx, {
+ blockItemPrefix: '- ',
+ flowChars: { start: '[', end: ']' },
+ itemIndent: (ctx.indent || '') + ' ',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+function asItemIndex(key) {
+ let idx = isScalar(key) ? key.value : key;
+ if (idx && typeof idx === 'string')
+ idx = Number(idx);
+ return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
+ ? idx
+ : null;
+}
+
+export { YAMLSeq };
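A sketch of the index handling above (`asItemIndex` accepts numbers, numeric strings, and `Scalar` nodes wrapping either), assuming the public `parseDocument` API:

```js
import { parseDocument } from 'yaml';

const doc = parseDocument('- a\n- b\n- c\n');
const seq = doc.contents;
seq.get(1);              // 'b'
seq.get('2');            // 'c'  (numeric string is coerced)
seq.set(0, 'z');         // updates the existing Scalar in place
seq.delete(1);           // true
console.log(doc.toJS()); // [ 'z', 'c' ]
```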
diff --git a/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js b/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js
new file mode 100644
index 0000000..1310766
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js
@@ -0,0 +1,104 @@
+import { warn } from '../log.js';
+import { createStringifyContext } from '../stringify/stringify.js';
+import { isAlias, isSeq, isScalar, isMap, isNode } from './Node.js';
+import { Scalar } from './Scalar.js';
+import { toJS } from './toJS.js';
+
+const MERGE_KEY = '<<';
+function addPairToJSMap(ctx, map, { key, value }) {
+ if ((ctx === null || ctx === void 0 ? void 0 : ctx.doc.schema.merge) && isMergeKey(key)) {
+ value = isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (isSeq(value))
+ for (const it of value.items)
+ mergeToJSMap(ctx, map, it);
+ else if (Array.isArray(value))
+ for (const it of value)
+ mergeToJSMap(ctx, map, it);
+ else
+ mergeToJSMap(ctx, map, value);
+ }
+ else {
+ const jsKey = toJS(key, '', ctx);
+ if (map instanceof Map) {
+ map.set(jsKey, toJS(value, jsKey, ctx));
+ }
+ else if (map instanceof Set) {
+ map.add(jsKey);
+ }
+ else {
+ const stringKey = stringifyKey(key, jsKey, ctx);
+ const jsValue = toJS(value, stringKey, ctx);
+ if (stringKey in map)
+ Object.defineProperty(map, stringKey, {
+ value: jsValue,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ else
+ map[stringKey] = jsValue;
+ }
+ }
+ return map;
+}
+const isMergeKey = (key) => key === MERGE_KEY ||
+ (isScalar(key) &&
+ key.value === MERGE_KEY &&
+ (!key.type || key.type === Scalar.PLAIN));
+// If the value associated with a merge key is a single mapping node, each of
+// its key/value pairs is inserted into the current mapping, unless the key
+// already exists in it. If the value associated with the merge key is a
+// sequence, then this sequence is expected to contain mapping nodes and each
+// of these nodes is merged in turn according to its order in the sequence.
+// Keys in mapping nodes earlier in the sequence override keys specified in
+// later mapping nodes. -- http://yaml.org/type/merge.html
+function mergeToJSMap(ctx, map, value) {
+ const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (!isMap(source))
+ throw new Error('Merge sources must be maps or map aliases');
+ const srcMap = source.toJSON(null, ctx, Map);
+ for (const [key, value] of srcMap) {
+ if (map instanceof Map) {
+ if (!map.has(key))
+ map.set(key, value);
+ }
+ else if (map instanceof Set) {
+ map.add(key);
+ }
+ else if (!Object.prototype.hasOwnProperty.call(map, key)) {
+ Object.defineProperty(map, key, {
+ value,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ }
+ }
+ return map;
+}
+function stringifyKey(key, jsKey, ctx) {
+ if (jsKey === null)
+ return '';
+ if (typeof jsKey !== 'object')
+ return String(jsKey);
+ if (isNode(key) && ctx && ctx.doc) {
+ const strCtx = createStringifyContext(ctx.doc, {});
+ strCtx.anchors = new Set();
+ for (const node of ctx.anchors.keys())
+ strCtx.anchors.add(node.anchor);
+ strCtx.inFlow = true;
+ strCtx.inStringifyKey = true;
+ const strKey = key.toString(strCtx);
+ if (!ctx.mapKeyWarned) {
+ let jsonStr = JSON.stringify(strKey);
+ if (jsonStr.length > 40)
+ jsonStr = jsonStr.substring(0, 36) + '..."';
+ warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
+ ctx.mapKeyWarned = true;
+ }
+ return strKey;
+ }
+ return JSON.stringify(jsKey);
+}
+
+export { addPairToJSMap };
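A sketch of the `<<` merge-key resolution implemented above, assuming the public `parse()` accepts the `merge: true` schema option for the default YAML 1.2 schema:

```js
import { parse } from 'yaml';

const src = 'defaults: &d { a: 1, b: 2 }\nitem:\n  <<: *d\n  b: 3\n';
console.log(parse(src, { merge: true }));
// Keys already present win over merged ones:
// { defaults: { a: 1, b: 2 }, item: { a: 1, b: 3 } }
```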
diff --git a/node_modules/yaml/browser/dist/nodes/toJS.js b/node_modules/yaml/browser/dist/nodes/toJS.js
new file mode 100644
index 0000000..436c7e8
--- /dev/null
+++ b/node_modules/yaml/browser/dist/nodes/toJS.js
@@ -0,0 +1,37 @@
+import { hasAnchor } from './Node.js';
+
+/**
+ * Recursively convert any node or its contents to native JavaScript
+ *
+ * @param value - The input value
+ * @param arg - If `value` defines a `toJSON()` method, use this
+ * as its first argument
+ * @param ctx - Conversion context, originally set in Document#toJS(). If
+ * `{ keep: true }` is not set, output should be suitable for JSON
+ * stringification.
+ */
+function toJS(value, arg, ctx) {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+ if (Array.isArray(value))
+ return value.map((v, i) => toJS(v, String(i), ctx));
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ if (!ctx || !hasAnchor(value))
+ return value.toJSON(arg, ctx);
+ const data = { aliasCount: 0, count: 1, res: undefined };
+ ctx.anchors.set(value, data);
+ ctx.onCreate = res => {
+ data.res = res;
+ delete ctx.onCreate;
+ };
+ const res = value.toJSON(arg, ctx);
+ if (ctx.onCreate)
+ ctx.onCreate(res);
+ return res;
+ }
+ if (typeof value === 'bigint' && !(ctx === null || ctx === void 0 ? void 0 : ctx.keep))
+ return Number(value);
+ return value;
+}
+
+export { toJS };
diff --git a/node_modules/yaml/browser/dist/parse/cst-scalar.js b/node_modules/yaml/browser/dist/parse/cst-scalar.js
new file mode 100644
index 0000000..0b99018
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/cst-scalar.js
@@ -0,0 +1,215 @@
+import { resolveBlockScalar } from '../compose/resolve-block-scalar.js';
+import { resolveFlowScalar } from '../compose/resolve-flow-scalar.js';
+import { YAMLParseError } from '../errors.js';
+import { stringifyString } from '../stringify/stringifyString.js';
+
+function resolveAsScalar(token, strict = true, onError) {
+ if (token) {
+ const _onError = (pos, code, message) => {
+ const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
+ if (onError)
+ onError(offset, code, message);
+ else
+ throw new YAMLParseError([offset, offset + 1], code, message);
+ };
+ switch (token.type) {
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return resolveFlowScalar(token, strict, _onError);
+ case 'block-scalar':
+ return resolveBlockScalar(token, strict, _onError);
+ }
+ }
+ return null;
+}
+/**
+ * Create a new scalar token with `value`
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.indent The indent level of the token.
+ * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
+ * @param context.offset The offset position of the token.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function createScalarToken(value, context) {
+ var _a;
+ const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
+ const source = stringifyString({ type, value }, {
+ implicitKey,
+ indent: indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ const end = (_a = context.end) !== null && _a !== void 0 ? _a : [
+ { type: 'newline', offset: -1, indent, source: '\n' }
+ ];
+ switch (source[0]) {
+ case '|':
+ case '>': {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, end))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ return { type: 'block-scalar', offset, indent, props, source: body };
+ }
+ case '"':
+ return { type: 'double-quoted-scalar', offset, indent, source, end };
+ case "'":
+ return { type: 'single-quoted-scalar', offset, indent, source, end };
+ default:
+ return { type: 'scalar', offset, indent, source, end };
+ }
+}
+/**
+ * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
+ *
+ * Best efforts are made to retain any comments previously associated with the `token`,
+ * though all contents within a collection's `items` will be overwritten.
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function setScalarValue(token, value, context = {}) {
+ let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
+ let indent = 'indent' in token ? token.indent : null;
+ if (afterKey && typeof indent === 'number')
+ indent += 2;
+ if (!type)
+ switch (token.type) {
+ case 'single-quoted-scalar':
+ type = 'QUOTE_SINGLE';
+ break;
+ case 'double-quoted-scalar':
+ type = 'QUOTE_DOUBLE';
+ break;
+ case 'block-scalar': {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
+ break;
+ }
+ default:
+ type = 'PLAIN';
+ }
+ const source = stringifyString({ type, value }, {
+ implicitKey: implicitKey || indent === null,
+ indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ switch (source[0]) {
+ case '|':
+ case '>':
+ setBlockScalarValue(token, source);
+ break;
+ case '"':
+ setFlowScalarValue(token, source, 'double-quoted-scalar');
+ break;
+ case "'":
+ setFlowScalarValue(token, source, 'single-quoted-scalar');
+ break;
+ default:
+ setFlowScalarValue(token, source, 'scalar');
+ }
+}
+function setBlockScalarValue(token, source) {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ if (token.type === 'block-scalar') {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ header.source = head;
+ token.source = body;
+ }
+ else {
+ const { offset } = token;
+ const indent = 'indent' in token ? token.indent : -1;
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type: 'block-scalar', indent, props, source: body });
+ }
+}
+/** @returns `true` if last token is a newline */
+function addEndtoBlockProps(props, end) {
+ if (end)
+ for (const st of end)
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ props.push(st);
+ break;
+ case 'newline':
+ props.push(st);
+ return true;
+ }
+ return false;
+}
+function setFlowScalarValue(token, source, type) {
+ switch (token.type) {
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ token.type = type;
+ token.source = source;
+ break;
+ case 'block-scalar': {
+ const end = token.props.slice(1);
+ let oa = source.length;
+ if (token.props[0].type === 'block-scalar-header')
+ oa -= token.props[0].source.length;
+ for (const tok of end)
+ tok.offset += oa;
+ delete token.props;
+ Object.assign(token, { type, source, end });
+ break;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ const offset = token.offset + source.length;
+ const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
+ delete token.items;
+ Object.assign(token, { type, source, end: [nl] });
+ break;
+ }
+ default: {
+ const indent = 'indent' in token ? token.indent : -1;
+ const end = 'end' in token && Array.isArray(token.end)
+ ? token.end.filter(st => st.type === 'space' ||
+ st.type === 'comment' ||
+ st.type === 'newline')
+ : [];
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type, indent, source, end });
+ }
+ }
+}
+
+export { createScalarToken, resolveAsScalar, setScalarValue };
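A sketch of editing a scalar token in place with the helpers above, assuming they are re-exported on the package's `CST` namespace and that parser-produced scalar tokens carry an `indent` value:

```js
import { Parser, CST } from 'yaml';

const [doc] = new Parser().parse('key: old value\n');
CST.visit(doc, ({ value }) => {
  if (value && CST.isScalar(value) &&
      CST.resolveAsScalar(value).value === 'old value')
    CST.setScalarValue(value, 'new value', { afterKey: true });
});
console.log(CST.stringify(doc)); // expected: 'key: new value\n'
```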
diff --git a/node_modules/yaml/browser/dist/parse/cst-stringify.js b/node_modules/yaml/browser/dist/parse/cst-stringify.js
new file mode 100644
index 0000000..d6ab58c
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/cst-stringify.js
@@ -0,0 +1,61 @@
+/**
+ * Stringify a CST document, token, or collection item
+ *
+ * Fair warning: This applies no validation whatsoever, and
+ * simply concatenates the sources in their logical order.
+ */
+const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
+function stringifyToken(token) {
+ switch (token.type) {
+ case 'block-scalar': {
+ let res = '';
+ for (const tok of token.props)
+ res += stringifyToken(tok);
+ return res + token.source;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ let res = '';
+ for (const item of token.items)
+ res += stringifyItem(item);
+ return res;
+ }
+ case 'flow-collection': {
+ let res = token.start.source;
+ for (const item of token.items)
+ res += stringifyItem(item);
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ case 'document': {
+ let res = stringifyItem(token);
+ if (token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ default: {
+ let res = token.source;
+ if ('end' in token && token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ }
+}
+function stringifyItem({ start, key, sep, value }) {
+ let res = '';
+ for (const st of start)
+ res += st.source;
+ if (key)
+ res += stringifyToken(key);
+ if (sep)
+ for (const st of sep)
+ res += st.source;
+ if (value)
+ res += stringifyToken(value);
+ return res;
+}
+
+export { stringify };
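Since `stringify` only concatenates token sources, a `Parser` to `CST.stringify` round trip should reproduce the input byte for byte; a sketch assuming the `CST` namespace re-export:

```js
import { Parser, CST } from 'yaml';

const src = '# comment\nfoo: [1, 2]\n';
const out = Array.from(new Parser().parse(src), CST.stringify).join('');
console.log(out === src); // true
```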
diff --git a/node_modules/yaml/browser/dist/parse/cst-visit.js b/node_modules/yaml/browser/dist/parse/cst-visit.js
new file mode 100644
index 0000000..1cef86e
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/cst-visit.js
@@ -0,0 +1,97 @@
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove item');
+/**
+ * Apply a visitor to a CST document or item.
+ *
+ * Walks through the tree (depth-first) starting from the root, calling a
+ * `visitor` function with two arguments when entering each item:
+ * - `item`: The current item, which includes the following members:
+ * - `start: SourceToken[]` – Source tokens before the key or value,
+ * possibly including its anchor or tag.
+ * - `key?: Token | null` – Set for pair values. May then be `null`, if
+ * the key before the `:` separator is empty.
+ * - `sep?: SourceToken[]` – Source tokens between the key and the value,
+ * which should include the `:` map value indicator if `value` is set.
+ * - `value?: Token` – The value of a sequence item, or of a map pair.
+ * - `path`: The steps from the root to the current node, as an array of
+ * `['key' | 'value', number]` tuples.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this token, continue with
+ * next sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current item, then continue with the next one
+ * - `number`: Set the index of the next step. This is useful especially if
+ * the index of the current token has changed.
+ * - `function`: Define the next visitor for this item. After the original
+ * visitor is called on item entry, next visitors are called after handling
+ * a non-empty `key` and when exiting the item.
+ */
+function visit(cst, visitor) {
+ if ('type' in cst && cst.type === 'document')
+ cst = { start: cst.start, value: cst.value };
+ _visit(Object.freeze([]), cst, visitor);
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current item */
+visit.SKIP = SKIP;
+/** Remove the current item */
+visit.REMOVE = REMOVE;
+/** Find the item at `path` from `cst` as the root */
+visit.itemAtPath = (cst, path) => {
+ let item = cst;
+ for (const [field, index] of path) {
+ const tok = item === null || item === void 0 ? void 0 : item[field];
+ if (tok && 'items' in tok) {
+ item = tok.items[index];
+ }
+ else
+ return undefined;
+ }
+ return item;
+};
+/**
+ * Get the immediate parent collection of the item at `path` from `cst` as the root.
+ *
+ * Throws an error if the collection is not found, which should never happen if the item itself exists.
+ */
+visit.parentCollection = (cst, path) => {
+ const parent = visit.itemAtPath(cst, path.slice(0, -1));
+ const field = path[path.length - 1][0];
+ const coll = parent === null || parent === void 0 ? void 0 : parent[field];
+ if (coll && 'items' in coll)
+ return coll;
+ throw new Error('Parent collection not found');
+};
+function _visit(path, item, visitor) {
+ let ctrl = visitor(item, path);
+ if (typeof ctrl === 'symbol')
+ return ctrl;
+ for (const field of ['key', 'value']) {
+ const token = item[field];
+ if (token && 'items' in token) {
+ for (let i = 0; i < token.items.length; ++i) {
+ const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ token.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ if (typeof ctrl === 'function' && field === 'key')
+ ctrl = ctrl(item, path);
+ }
+ }
+ return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
+}
+
+export { visit };
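A sketch of the visitor controls described above: returning `REMOVE` drops an item, while `BREAK` would end the traversal. Assumes the `CST` namespace re-export of `visit`:

```js
import { Parser, CST } from 'yaml';

const [doc] = new Parser().parse('[1, 2, 3]\n');
CST.visit(doc, (_item, path) => {
  // path is a list of ['key' | 'value', index] steps from the root
  if (path.length === 1 && path[0][1] === 1) return CST.visit.REMOVE;
});
console.log(CST.stringify(doc)); // expected: '[1, 3]\n'
```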
diff --git a/node_modules/yaml/browser/dist/parse/cst.js b/node_modules/yaml/browser/dist/parse/cst.js
new file mode 100644
index 0000000..8bb2f4a
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/cst.js
@@ -0,0 +1,98 @@
+export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
+export { stringify } from './cst-stringify.js';
+export { visit } from './cst-visit.js';
+
+/** The byte order mark */
+const BOM = '\u{FEFF}';
+/** Start of doc-mode */
+const DOCUMENT = '\x02'; // C0: Start of Text
+/** Unexpected end of flow-mode */
+const FLOW_END = '\x18'; // C0: Cancel
+/** Next token is a scalar value */
+const SCALAR = '\x1f'; // C0: Unit Separator
+/** @returns `true` if `token` is a flow or block collection */
+const isCollection = (token) => !!token && 'items' in token;
+/** @returns `true` if `token` is a flow or block scalar; not an alias */
+const isScalar = (token) => !!token &&
+ (token.type === 'scalar' ||
+ token.type === 'single-quoted-scalar' ||
+ token.type === 'double-quoted-scalar' ||
+ token.type === 'block-scalar');
+/* istanbul ignore next */
+/** Get a printable representation of a lexer token */
+function prettyToken(token) {
+ switch (token) {
+ case BOM:
+ return '<BOM>';
+ case DOCUMENT:
+ return '<DOC>';
+ case FLOW_END:
+ return '<FLOW_END>';
+ case SCALAR:
+ return '<SCALAR>';
+ default:
+ return JSON.stringify(token);
+ }
+}
+/** Identify the type of a lexer token. May return `null` for unknown tokens. */
+function tokenType(source) {
+ switch (source) {
+ case BOM:
+ return 'byte-order-mark';
+ case DOCUMENT:
+ return 'doc-mode';
+ case FLOW_END:
+ return 'flow-error-end';
+ case SCALAR:
+ return 'scalar';
+ case '---':
+ return 'doc-start';
+ case '...':
+ return 'doc-end';
+ case '':
+ case '\n':
+ case '\r\n':
+ return 'newline';
+ case '-':
+ return 'seq-item-ind';
+ case '?':
+ return 'explicit-key-ind';
+ case ':':
+ return 'map-value-ind';
+ case '{':
+ return 'flow-map-start';
+ case '}':
+ return 'flow-map-end';
+ case '[':
+ return 'flow-seq-start';
+ case ']':
+ return 'flow-seq-end';
+ case ',':
+ return 'comma';
+ }
+ switch (source[0]) {
+ case ' ':
+ case '\t':
+ return 'space';
+ case '#':
+ return 'comment';
+ case '%':
+ return 'directive-line';
+ case '*':
+ return 'alias';
+ case '&':
+ return 'anchor';
+ case '!':
+ return 'tag';
+ case "'":
+ return 'single-quoted-scalar';
+ case '"':
+ return 'double-quoted-scalar';
+ case '|':
+ case '>':
+ return 'block-scalar-header';
+ }
+ return null;
+}
+
+export { BOM, DOCUMENT, FLOW_END, SCALAR, isCollection, isScalar, prettyToken, tokenType };
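A sketch pairing raw `Lexer` output with `tokenType`/`prettyToken`, assuming both are exposed on the `CST` namespace; note that plain scalar content itself classifies as `null`, which is why the `\x1f` indicator precedes it:

```js
import { Lexer, CST } from 'yaml';

for (const lexeme of new Lexer().lex('foo: bar\n'))
  console.log(CST.prettyToken(lexeme), CST.tokenType(lexeme));
// <DOC> doc-mode, <SCALAR> scalar, "foo" null, ":" map-value-ind,
// " " space, <SCALAR> scalar, "bar" null, "\n" newline
```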
diff --git a/node_modules/yaml/browser/dist/parse/lexer.js b/node_modules/yaml/browser/dist/parse/lexer.js
new file mode 100644
index 0000000..d866418
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/lexer.js
@@ -0,0 +1,702 @@
+import { BOM, DOCUMENT, FLOW_END, SCALAR } from './cst.js';
+
+/*
+START -> stream
+
+stream
+ directive -> line-end -> stream
+ indent + line-end -> stream
+ [else] -> line-start
+
+line-end
+ comment -> line-end
+ newline -> .
+ input-end -> END
+
+line-start
+ doc-start -> doc
+ doc-end -> stream
+ [else] -> indent -> block-start
+
+block-start
+ seq-item-start -> block-start
+ explicit-key-start -> block-start
+ map-value-start -> block-start
+ [else] -> doc
+
+doc
+ line-end -> line-start
+ spaces -> doc
+ anchor -> doc
+ tag -> doc
+ flow-start -> flow -> doc
+ flow-end -> error -> doc
+ seq-item-start -> error -> doc
+ explicit-key-start -> error -> doc
+ map-value-start -> doc
+ alias -> doc
+ quote-start -> quoted-scalar -> doc
+ block-scalar-header -> line-end -> block-scalar(min) -> line-start
+ [else] -> plain-scalar(false, min) -> doc
+
+flow
+ line-end -> flow
+ spaces -> flow
+ anchor -> flow
+ tag -> flow
+ flow-start -> flow -> flow
+ flow-end -> .
+ seq-item-start -> error -> flow
+ explicit-key-start -> flow
+ map-value-start -> flow
+ alias -> flow
+ quote-start -> quoted-scalar -> flow
+ comma -> flow
+ [else] -> plain-scalar(true, 0) -> flow
+
+quoted-scalar
+ quote-end -> .
+ [else] -> quoted-scalar
+
+block-scalar(min)
+ newline + peek(indent < min) -> .
+ [else] -> block-scalar(min)
+
+plain-scalar(is-flow, min)
+ scalar-end(is-flow) -> .
+ peek(newline + (indent < min)) -> .
+ [else] -> plain-scalar(min)
+*/
+function isEmpty(ch) {
+ switch (ch) {
+ case undefined:
+ case ' ':
+ case '\n':
+ case '\r':
+ case '\t':
+ return true;
+ default:
+ return false;
+ }
+}
+const hexDigits = '0123456789ABCDEFabcdef'.split('');
+const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
+const invalidFlowScalarChars = ',[]{}'.split('');
+const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
+const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
+/**
+ * Splits an input string into lexical tokens, i.e. smaller strings that are
+ * easily identifiable by `tokens.tokenType()`.
+ *
+ * Lexing starts always in a "stream" context. Incomplete input may be buffered
+ * until a complete token can be emitted.
+ *
+ * In addition to slices of the original input, the following control characters
+ * may also be emitted:
+ *
+ * - `\x02` (Start of Text): A document starts with the next token
+ * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
+ * - `\x1f` (Unit Separator): Next token is a scalar value
+ * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
+ */
+class Lexer {
+ constructor() {
+ /**
+ * Flag indicating whether the end of the current buffer marks the end of
+ * all input
+ */
+ this.atEnd = false;
+ /**
+ * Explicit indent set in block scalar header, as an offset from the current
+ * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
+ * explicitly set.
+ */
+ this.blockScalarIndent = -1;
+ /**
+ * Block scalars that include a + (keep) chomping indicator in their header
+ * include trailing empty lines, which are otherwise excluded from the
+ * scalar's contents.
+ */
+ this.blockScalarKeep = false;
+ /** Current input */
+ this.buffer = '';
+ /**
+ * Flag noting whether the map value indicator : can immediately follow this
+ * node within a flow context.
+ */
+ this.flowKey = false;
+ /** Count of surrounding flow collection levels. */
+ this.flowLevel = 0;
+ /**
+ * Minimum level of indentation required for next lines to be parsed as a
+ * part of the current scalar value.
+ */
+ this.indentNext = 0;
+ /** Indentation level of the current line. */
+ this.indentValue = 0;
+ /** Position of the next \n character. */
+ this.lineEndPos = null;
+        /** Stores the state of the lexer if reaching the end of incomplete input */
+ this.next = null;
+ /** A pointer to `buffer`; the current position of the lexer. */
+ this.pos = 0;
+ }
+ /**
+ * Generate YAML tokens from the `source` string. If `incomplete`,
+ * a part of the last line may be left as a buffer for the next call.
+ *
+ * @returns A generator of lexical tokens
+ */
+ *lex(source, incomplete = false) {
+ var _a;
+ if (source) {
+ this.buffer = this.buffer ? this.buffer + source : source;
+ this.lineEndPos = null;
+ }
+ this.atEnd = !incomplete;
+ let next = (_a = this.next) !== null && _a !== void 0 ? _a : 'stream';
+ while (next && (incomplete || this.hasChars(1)))
+ next = yield* this.parseNext(next);
+ }
+ atLineEnd() {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[++i];
+ if (!ch || ch === '#' || ch === '\n')
+ return true;
+ if (ch === '\r')
+ return this.buffer[i + 1] === '\n';
+ return false;
+ }
+ charAt(n) {
+ return this.buffer[this.pos + n];
+ }
+ continueScalar(offset) {
+ let ch = this.buffer[offset];
+ if (this.indentNext > 0) {
+ let indent = 0;
+ while (ch === ' ')
+ ch = this.buffer[++indent + offset];
+ if (ch === '\r') {
+ const next = this.buffer[indent + offset + 1];
+ if (next === '\n' || (!next && !this.atEnd))
+ return offset + indent + 1;
+ }
+ return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
+ ? offset + indent
+ : -1;
+ }
+ if (ch === '-' || ch === '.') {
+ const dt = this.buffer.substr(offset, 3);
+ if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
+ return -1;
+ }
+ return offset;
+ }
+ getLine() {
+ let end = this.lineEndPos;
+ if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
+ end = this.buffer.indexOf('\n', this.pos);
+ this.lineEndPos = end;
+ }
+ if (end === -1)
+ return this.atEnd ? this.buffer.substring(this.pos) : null;
+ if (this.buffer[end - 1] === '\r')
+ end -= 1;
+ return this.buffer.substring(this.pos, end);
+ }
+ hasChars(n) {
+ return this.pos + n <= this.buffer.length;
+ }
+ setNext(state) {
+ this.buffer = this.buffer.substring(this.pos);
+ this.pos = 0;
+ this.lineEndPos = null;
+ this.next = state;
+ return null;
+ }
+ peek(n) {
+ return this.buffer.substr(this.pos, n);
+ }
+ *parseNext(next) {
+ switch (next) {
+ case 'stream':
+ return yield* this.parseStream();
+ case 'line-start':
+ return yield* this.parseLineStart();
+ case 'block-start':
+ return yield* this.parseBlockStart();
+ case 'doc':
+ return yield* this.parseDocument();
+ case 'flow':
+ return yield* this.parseFlowCollection();
+ case 'quoted-scalar':
+ return yield* this.parseQuotedScalar();
+ case 'block-scalar':
+ return yield* this.parseBlockScalar();
+ case 'plain-scalar':
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseStream() {
+ let line = this.getLine();
+ if (line === null)
+ return this.setNext('stream');
+ if (line[0] === BOM) {
+ yield* this.pushCount(1);
+ line = line.substring(1);
+ }
+ if (line[0] === '%') {
+ let dirEnd = line.length;
+ const cs = line.indexOf('#');
+ if (cs !== -1) {
+ const ch = line[cs - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd = cs - 1;
+ }
+ while (true) {
+ const ch = line[dirEnd - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd -= 1;
+ else
+ break;
+ }
+ const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
+ yield* this.pushCount(line.length - n); // possible comment
+ this.pushNewline();
+ return 'stream';
+ }
+ if (this.atLineEnd()) {
+ const sp = yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - sp);
+ yield* this.pushNewline();
+ return 'stream';
+ }
+ yield DOCUMENT;
+ return yield* this.parseLineStart();
+ }
+ *parseLineStart() {
+ const ch = this.charAt(0);
+ if (!ch && !this.atEnd)
+ return this.setNext('line-start');
+ if (ch === '-' || ch === '.') {
+ if (!this.atEnd && !this.hasChars(4))
+ return this.setNext('line-start');
+ const s = this.peek(3);
+ if (s === '---' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ this.indentValue = 0;
+ this.indentNext = 0;
+ return 'doc';
+ }
+ else if (s === '...' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ return 'stream';
+ }
+ }
+ this.indentValue = yield* this.pushSpaces(false);
+ if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
+ this.indentNext = this.indentValue;
+ return yield* this.parseBlockStart();
+ }
+ *parseBlockStart() {
+ const [ch0, ch1] = this.peek(2);
+ if (!ch1 && !this.atEnd)
+ return this.setNext('block-start');
+ if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
+ const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
+ this.indentNext = this.indentValue + 1;
+ this.indentValue += n;
+ return yield* this.parseBlockStart();
+ }
+ return 'doc';
+ }
+ *parseDocument() {
+ yield* this.pushSpaces(true);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('doc');
+ let n = yield* this.pushIndicators();
+ switch (line[n]) {
+ case '#':
+ yield* this.pushCount(line.length - n);
+ // fallthrough
+ case undefined:
+ yield* this.pushNewline();
+ return yield* this.parseLineStart();
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel = 1;
+ return 'flow';
+ case '}':
+ case ']':
+ // this is an error
+ yield* this.pushCount(1);
+ return 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'doc';
+ case '"':
+ case "'":
+ return yield* this.parseQuotedScalar();
+ case '|':
+ case '>':
+ n += yield* this.parseBlockScalarHeader();
+ n += yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - n);
+ yield* this.pushNewline();
+ return yield* this.parseBlockScalar();
+ default:
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseFlowCollection() {
+ let nl, sp;
+ let indent = -1;
+ do {
+ nl = yield* this.pushNewline();
+ if (nl > 0) {
+ sp = yield* this.pushSpaces(false);
+ this.indentValue = indent = sp;
+ }
+ else {
+ sp = 0;
+ }
+ sp += yield* this.pushSpaces(true);
+ } while (nl + sp > 0);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('flow');
+ if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
+ (indent === 0 &&
+ (line.startsWith('---') || line.startsWith('...')) &&
+ isEmpty(line[3]))) {
+ // Allowing for the terminal ] or } at the same (rather than greater)
+ // indent level as the initial [ or { is technically invalid, but
+ // failing here would be surprising to users.
+ const atFlowEndMarker = indent === this.indentNext - 1 &&
+ this.flowLevel === 1 &&
+ (line[0] === ']' || line[0] === '}');
+ if (!atFlowEndMarker) {
+ // this is an error
+ this.flowLevel = 0;
+ yield FLOW_END;
+ return yield* this.parseLineStart();
+ }
+ }
+ let n = 0;
+ while (line[n] === ',') {
+ n += yield* this.pushCount(1);
+ n += yield* this.pushSpaces(true);
+ this.flowKey = false;
+ }
+ n += yield* this.pushIndicators();
+ switch (line[n]) {
+ case undefined:
+ return 'flow';
+ case '#':
+ yield* this.pushCount(line.length - n);
+ return 'flow';
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel += 1;
+ return 'flow';
+ case '}':
+ case ']':
+ yield* this.pushCount(1);
+ this.flowKey = true;
+ this.flowLevel -= 1;
+ return this.flowLevel ? 'flow' : 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'flow';
+ case '"':
+ case "'":
+ this.flowKey = true;
+ return yield* this.parseQuotedScalar();
+ case ':': {
+ const next = this.charAt(1);
+ if (this.flowKey || isEmpty(next) || next === ',') {
+ this.flowKey = false;
+ yield* this.pushCount(1);
+ yield* this.pushSpaces(true);
+ return 'flow';
+ }
+ }
+ // fallthrough
+ default:
+ this.flowKey = false;
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseQuotedScalar() {
+ const quote = this.charAt(0);
+ let end = this.buffer.indexOf(quote, this.pos + 1);
+ if (quote === "'") {
+ while (end !== -1 && this.buffer[end + 1] === "'")
+ end = this.buffer.indexOf("'", end + 2);
+ }
+ else {
+ // double-quote
+ while (end !== -1) {
+ let n = 0;
+ while (this.buffer[end - 1 - n] === '\\')
+ n += 1;
+ if (n % 2 === 0)
+ break;
+ end = this.buffer.indexOf('"', end + 1);
+ }
+ }
+ // Only looking for newlines within the quotes
+ const qb = this.buffer.substring(0, end);
+ let nl = qb.indexOf('\n', this.pos);
+ if (nl !== -1) {
+ while (nl !== -1) {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = qb.indexOf('\n', cs);
+ }
+ if (nl !== -1) {
+ // this is an error caused by an unexpected unindent
+ end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
+ }
+ }
+ if (end === -1) {
+ if (!this.atEnd)
+ return this.setNext('quoted-scalar');
+ end = this.buffer.length;
+ }
+ yield* this.pushToIndex(end + 1, false);
+ return this.flowLevel ? 'flow' : 'doc';
+ }
+ *parseBlockScalarHeader() {
+ this.blockScalarIndent = -1;
+ this.blockScalarKeep = false;
+ let i = this.pos;
+ while (true) {
+ const ch = this.buffer[++i];
+ if (ch === '+')
+ this.blockScalarKeep = true;
+ else if (ch > '0' && ch <= '9')
+ this.blockScalarIndent = Number(ch) - 1;
+ else if (ch !== '-')
+ break;
+ }
+ return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
+ }
+ *parseBlockScalar() {
+ let nl = this.pos - 1; // may be -1 if this.pos === 0
+ let indent = 0;
+ let ch;
+ loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
+ switch (ch) {
+ case ' ':
+ indent += 1;
+ break;
+ case '\n':
+ nl = i;
+ indent = 0;
+ break;
+ case '\r': {
+ const next = this.buffer[i + 1];
+ if (!next && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (next === '\n')
+ break;
+ } // fallthrough
+ default:
+ break loop;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (indent >= this.indentNext) {
+ if (this.blockScalarIndent === -1)
+ this.indentNext = indent;
+ else
+ this.indentNext += this.blockScalarIndent;
+ do {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = this.buffer.indexOf('\n', cs);
+ } while (nl !== -1);
+ if (nl === -1) {
+ if (!this.atEnd)
+ return this.setNext('block-scalar');
+ nl = this.buffer.length;
+ }
+ }
+ if (!this.blockScalarKeep) {
+ do {
+ let i = nl - 1;
+ let ch = this.buffer[i];
+ if (ch === '\r')
+ ch = this.buffer[--i];
+ const lastChar = i; // Drop the line if last char not more indented
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[--i];
+ if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
+ nl = i;
+ else
+ break;
+ } while (true);
+ }
+ yield SCALAR;
+ yield* this.pushToIndex(nl + 1, true);
+ return yield* this.parseLineStart();
+ }
+ *parsePlainScalar() {
+ const inFlow = this.flowLevel > 0;
+ let end = this.pos - 1;
+ let i = this.pos - 1;
+ let ch;
+ while ((ch = this.buffer[++i])) {
+ if (ch === ':') {
+ const next = this.buffer[i + 1];
+ if (isEmpty(next) || (inFlow && next === ','))
+ break;
+ end = i;
+ }
+ else if (isEmpty(ch)) {
+ let next = this.buffer[i + 1];
+ if (ch === '\r') {
+ if (next === '\n') {
+ i += 1;
+ ch = '\n';
+ next = this.buffer[i + 1];
+ }
+ else
+ end = i;
+ }
+ if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
+ break;
+ if (ch === '\n') {
+ const cs = this.continueScalar(i + 1);
+ if (cs === -1)
+ break;
+ i = Math.max(i, cs - 2); // to advance, but still account for ' #'
+ }
+ }
+ else {
+ if (inFlow && invalidFlowScalarChars.includes(ch))
+ break;
+ end = i;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('plain-scalar');
+ yield SCALAR;
+ yield* this.pushToIndex(end + 1, true);
+ return inFlow ? 'flow' : 'doc';
+ }
+ *pushCount(n) {
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos += n;
+ return n;
+ }
+ return 0;
+ }
+ *pushToIndex(i, allowEmpty) {
+ const s = this.buffer.slice(this.pos, i);
+ if (s) {
+ yield s;
+ this.pos += s.length;
+ return s.length;
+ }
+ else if (allowEmpty)
+ yield '';
+ return 0;
+ }
+ *pushIndicators() {
+ switch (this.charAt(0)) {
+ case '!':
+ return ((yield* this.pushTag()) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '&':
+ return ((yield* this.pushUntil(isNotAnchorChar)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '-': // this is an error
+ case '?': // this is an error outside flow collections
+ case ':': {
+ const inFlow = this.flowLevel > 0;
+ const ch1 = this.charAt(1);
+ if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
+ if (!inFlow)
+ this.indentNext = this.indentValue + 1;
+ else if (this.flowKey)
+ this.flowKey = false;
+ return ((yield* this.pushCount(1)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ }
+ }
+ }
+ return 0;
+ }
+ *pushTag() {
+ if (this.charAt(1) === '<') {
+ let i = this.pos + 2;
+ let ch = this.buffer[i];
+ while (!isEmpty(ch) && ch !== '>')
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
+ }
+ else {
+ let i = this.pos + 1;
+ let ch = this.buffer[i];
+ while (ch) {
+ if (tagChars.includes(ch))
+ ch = this.buffer[++i];
+ else if (ch === '%' &&
+ hexDigits.includes(this.buffer[i + 1]) &&
+ hexDigits.includes(this.buffer[i + 2])) {
+ ch = this.buffer[(i += 3)];
+ }
+ else
+ break;
+ }
+ return yield* this.pushToIndex(i, false);
+ }
+ }
+ *pushNewline() {
+ const ch = this.buffer[this.pos];
+ if (ch === '\n')
+ return yield* this.pushCount(1);
+ else if (ch === '\r' && this.charAt(1) === '\n')
+ return yield* this.pushCount(2);
+ else
+ return 0;
+ }
+ *pushSpaces(allowTabs) {
+ let i = this.pos - 1;
+ let ch;
+ do {
+ ch = this.buffer[++i];
+ } while (ch === ' ' || (allowTabs && ch === '\t'));
+ const n = i - this.pos;
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos = i;
+ }
+ return n;
+ }
+ *pushUntil(test) {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (!test(ch))
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(i, false);
+ }
+}
+
+export { Lexer };
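A sketch of the incremental buffering described in the class comment above: with `incomplete = true`, a partial final line is held back until more input arrives.

```js
import { Lexer } from 'yaml';

const lexer = new Lexer();
const out = [];
for (const tok of lexer.lex('foo: b', true)) out.push(tok); // buffered
for (const tok of lexer.lex('ar\n')) out.push(tok);         // flushed
console.log(out.includes('bar')); // true: the split scalar is emitted whole
```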
diff --git a/node_modules/yaml/browser/dist/parse/line-counter.js b/node_modules/yaml/browser/dist/parse/line-counter.js
new file mode 100644
index 0000000..002ce24
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/line-counter.js
@@ -0,0 +1,39 @@
+/**
+ * Tracks newlines during parsing in order to provide an efficient API for
+ * determining the one-indexed `{ line, col }` position for any offset
+ * within the input.
+ */
+class LineCounter {
+ constructor() {
+ this.lineStarts = [];
+ /**
+ * Should be called in ascending order. Otherwise, call
+ * `lineCounter.lineStarts.sort()` before calling `linePos()`.
+ */
+ this.addNewLine = (offset) => this.lineStarts.push(offset);
+ /**
+ * Performs a binary search and returns the 1-indexed { line, col }
+ * position of `offset`. If `line === 0`, `addNewLine` has never been
+ * called or `offset` is before the first known newline.
+ */
+ this.linePos = (offset) => {
+ let low = 0;
+ let high = this.lineStarts.length;
+ while (low < high) {
+ const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
+ if (this.lineStarts[mid] < offset)
+ low = mid + 1;
+ else
+ high = mid;
+ }
+ if (this.lineStarts[low] === offset)
+ return { line: low + 1, col: 1 };
+ if (low === 0)
+ return { line: 0, col: offset };
+ const start = this.lineStarts[low - 1];
+ return { line: low, col: offset - start + 1 };
+ };
+ }
+}
+
+export { LineCounter };
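A sketch of the intended wiring, assuming the `Parser` constructor documented further below accepts an `onNewLine` callback:

```js
import { Parser, LineCounter } from 'yaml';

const lineCounter = new LineCounter();
const parser = new Parser(lineCounter.addNewLine);
Array.from(parser.parse('a: 1\nb: 2\n')); // drain the token stream
console.log(lineCounter.linePos(5)); // { line: 2, col: 1 } (start of 'b')
```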
diff --git a/node_modules/yaml/browser/dist/parse/parser.js b/node_modules/yaml/browser/dist/parse/parser.js
new file mode 100644
index 0000000..15da2b8
--- /dev/null
+++ b/node_modules/yaml/browser/dist/parse/parser.js
@@ -0,0 +1,952 @@
+import { tokenType } from './cst.js';
+import { Lexer } from './lexer.js';
+
+function includesToken(list, type) {
+ for (let i = 0; i < list.length; ++i)
+ if (list[i].type === type)
+ return true;
+ return false;
+}
+function includesNonEmpty(list) {
+ for (let i = 0; i < list.length; ++i) {
+ switch (list[i].type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ break;
+ default:
+ return true;
+ }
+ }
+ return false;
+}
+function isFlowToken(token) {
+ switch (token === null || token === void 0 ? void 0 : token.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'flow-collection':
+ return true;
+ default:
+ return false;
+ }
+}
+function getPrevProps(parent) {
+ var _a;
+ switch (parent.type) {
+ case 'document':
+ return parent.start;
+ case 'block-map': {
+ const it = parent.items[parent.items.length - 1];
+ return (_a = it.sep) !== null && _a !== void 0 ? _a : it.start;
+ }
+ case 'block-seq':
+ return parent.items[parent.items.length - 1].start;
+ /* istanbul ignore next should not happen */
+ default:
+ return [];
+ }
+}
+/** Note: May modify input array */
+function getFirstKeyStartProps(prev) {
+ var _a;
+ if (prev.length === 0)
+ return [];
+ let i = prev.length;
+ loop: while (--i >= 0) {
+ switch (prev[i].type) {
+ case 'doc-start':
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ case 'newline':
+ break loop;
+ }
+ }
+ while (((_a = prev[++i]) === null || _a === void 0 ? void 0 : _a.type) === 'space') {
+ /* loop */
+ }
+ return prev.splice(i, prev.length);
+}
+function fixFlowSeqItems(fc) {
+ if (fc.start.type === 'flow-seq-start') {
+ for (const it of fc.items) {
+ if (it.sep &&
+ !it.value &&
+ !includesToken(it.start, 'explicit-key-ind') &&
+ !includesToken(it.sep, 'map-value-ind')) {
+ if (it.key)
+ it.value = it.key;
+ delete it.key;
+ if (isFlowToken(it.value)) {
+ if (it.value.end)
+ Array.prototype.push.apply(it.value.end, it.sep);
+ else
+ it.value.end = it.sep;
+ }
+ else
+ Array.prototype.push.apply(it.start, it.sep);
+ delete it.sep;
+ }
+ }
+ }
+}
+/**
+ * A YAML concrete syntax tree (CST) parser
+ *
+ * ```ts
+ * const src: string = ...
+ * for (const token of new Parser().parse(src)) {
+ * // token: Token
+ * }
+ * ```
+ *
+ * To use the parser with a user-provided lexer:
+ *
+ * ```ts
+ * function* parse(source: string, lexer: Lexer) {
+ * const parser = new Parser()
+ * for (const lexeme of lexer.lex(source))
+ * yield* parser.next(lexeme)
+ * yield* parser.end()
+ * }
+ *
+ * const src: string = ...
+ * const lexer = new Lexer()
+ * for (const token of parse(src, lexer)) {
+ * // token: Token
+ * }
+ * ```
+ */
+class Parser {
+ /**
+ * @param onNewLine - If defined, called separately with the start position of
+ * each new line (in `parse()`, including the start of input).
+ */
+ constructor(onNewLine) {
+ /** If true, space and sequence indicators count as indentation */
+ this.atNewLine = true;
+ /** If true, next token is a scalar value */
+ this.atScalar = false;
+ /** Current indentation level */
+ this.indent = 0;
+ /** Current offset since the start of parsing */
+ this.offset = 0;
+ /** On the same line with a block map key */
+ this.onKeyLine = false;
+ /** Top indicates the node that's currently being built */
+ this.stack = [];
+ /** The source of the current token, set in parse() */
+ this.source = '';
+ /** The type of the current token, set in parse() */
+ this.type = '';
+ // Must be defined after `next()`
+ this.lexer = new Lexer();
+ this.onNewLine = onNewLine;
+ }
+ /**
+ * Parse `source` as a YAML stream.
+ * If `incomplete`, a part of the last line may be left as a buffer for the next call.
+ *
+ * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
+ *
+ * @returns A generator of tokens representing each directive, document, and other structure.
+ */
+ *parse(source, incomplete = false) {
+ if (this.onNewLine && this.offset === 0)
+ this.onNewLine(0);
+ for (const lexeme of this.lexer.lex(source, incomplete))
+ yield* this.next(lexeme);
+ if (!incomplete)
+ yield* this.end();
+ }
+ /**
+ * Advance the parser by the `source` of one lexical token.
+ */
+ *next(source) {
+ this.source = source;
+ if (this.atScalar) {
+ this.atScalar = false;
+ yield* this.step();
+ this.offset += source.length;
+ return;
+ }
+ const type = tokenType(source);
+ if (!type) {
+ const message = `Not a YAML token: ${source}`;
+ yield* this.pop({ type: 'error', offset: this.offset, message, source });
+ this.offset += source.length;
+ }
+ else if (type === 'scalar') {
+ this.atNewLine = false;
+ this.atScalar = true;
+ this.type = 'scalar';
+ }
+ else {
+ this.type = type;
+ yield* this.step();
+ switch (type) {
+ case 'newline':
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine)
+ this.onNewLine(this.offset + source.length);
+ break;
+ case 'space':
+ if (this.atNewLine && source[0] === ' ')
+ this.indent += source.length;
+ break;
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ if (this.atNewLine)
+ this.indent += source.length;
+ break;
+ case 'doc-mode':
+ case 'flow-error-end':
+ return;
+ default:
+ this.atNewLine = false;
+ }
+ this.offset += source.length;
+ }
+ }
+ /** Call at end of input to push out any remaining constructions */
+ *end() {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ }
+ get sourceToken() {
+ const st = {
+ type: this.type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ return st;
+ }
+ *step() {
+ const top = this.peek(1);
+ if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ this.stack.push({
+ type: 'doc-end',
+ offset: this.offset,
+ source: this.source
+ });
+ return;
+ }
+ if (!top)
+ return yield* this.stream();
+ switch (top.type) {
+ case 'document':
+ return yield* this.document(top);
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return yield* this.scalar(top);
+ case 'block-scalar':
+ return yield* this.blockScalar(top);
+ case 'block-map':
+ return yield* this.blockMap(top);
+ case 'block-seq':
+ return yield* this.blockSequence(top);
+ case 'flow-collection':
+ return yield* this.flowCollection(top);
+ case 'doc-end':
+ return yield* this.documentEnd(top);
+ }
+ /* istanbul ignore next should not happen */
+ yield* this.pop();
+ }
+ peek(n) {
+ return this.stack[this.stack.length - n];
+ }
+ *pop(error) {
+ const token = error !== null && error !== void 0 ? error : this.stack.pop();
+ /* istanbul ignore if should not happen */
+ if (!token) {
+ const message = 'Tried to pop an empty stack';
+ yield { type: 'error', offset: this.offset, source: '', message };
+ }
+ else if (this.stack.length === 0) {
+ yield token;
+ }
+ else {
+ const top = this.peek(1);
+ if (token.type === 'block-scalar') {
+ // Block scalars use their parent's indent rather than their header's
+ token.indent = 'indent' in top ? top.indent : 0;
+ }
+ else if (token.type === 'flow-collection' && top.type === 'document') {
+ // Ignore all indent for top-level flow collections
+ token.indent = 0;
+ }
+ if (token.type === 'flow-collection')
+ fixFlowSeqItems(token);
+ switch (top.type) {
+ case 'document':
+ top.value = token;
+ break;
+ case 'block-scalar':
+ top.props.push(token); // error
+ break;
+ case 'block-map': {
+ const it = top.items[top.items.length - 1];
+ if (it.value) {
+ top.items.push({ start: [], key: token, sep: [] });
+ this.onKeyLine = true;
+ return;
+ }
+ else if (it.sep) {
+ it.value = token;
+ }
+ else {
+ Object.assign(it, { key: token, sep: [] });
+ this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
+ return;
+ }
+ break;
+ }
+ case 'block-seq': {
+ const it = top.items[top.items.length - 1];
+ if (it.value)
+ top.items.push({ start: [], value: token });
+ else
+ it.value = token;
+ break;
+ }
+ case 'flow-collection': {
+ const it = top.items[top.items.length - 1];
+ if (!it || it.value)
+ top.items.push({ start: [], key: token, sep: [] });
+ else if (it.sep)
+ it.value = token;
+ else
+ Object.assign(it, { key: token, sep: [] });
+ return;
+ }
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.pop(token);
+ }
+ if ((top.type === 'document' ||
+ top.type === 'block-map' ||
+ top.type === 'block-seq') &&
+ (token.type === 'block-map' || token.type === 'block-seq')) {
+ const last = token.items[token.items.length - 1];
+ if (last &&
+ !last.sep &&
+ !last.value &&
+ last.start.length > 0 &&
+ !includesNonEmpty(last.start) &&
+ (token.indent === 0 ||
+ last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
+ if (top.type === 'document')
+ top.end = last.start;
+ else
+ top.items.push({ start: last.start });
+ token.items.splice(-1, 1);
+ }
+ }
+ }
+ }
+ *stream() {
+ switch (this.type) {
+ case 'directive-line':
+ yield { type: 'directive', offset: this.offset, source: this.source };
+ return;
+ case 'byte-order-mark':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ yield this.sourceToken;
+ return;
+ case 'doc-mode':
+ case 'doc-start': {
+ const doc = {
+ type: 'document',
+ offset: this.offset,
+ start: []
+ };
+ if (this.type === 'doc-start')
+ doc.start.push(this.sourceToken);
+ this.stack.push(doc);
+ return;
+ }
+ }
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML stream`,
+ source: this.source
+ };
+ }
+ *document(doc) {
+ if (doc.value)
+ return yield* this.lineEnd(doc);
+ switch (this.type) {
+ case 'doc-start': {
+ if (includesNonEmpty(doc.start)) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ case 'anchor':
+ case 'tag':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(doc);
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML document`,
+ source: this.source
+ };
+ }
+ }
+ *scalar(scalar) {
+ if (this.type === 'map-value-ind') {
+ const prev = getPrevProps(this.peek(2));
+ const start = getFirstKeyStartProps(prev);
+ let sep;
+ if (scalar.end) {
+ sep = scalar.end;
+ sep.push(this.sourceToken);
+ delete scalar.end;
+ }
+ else
+ sep = [this.sourceToken];
+ const map = {
+ type: 'block-map',
+ offset: scalar.offset,
+ indent: scalar.indent,
+ items: [{ start, key: scalar, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else
+ yield* this.lineEnd(scalar);
+ }
+ *blockScalar(scalar) {
+ switch (this.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ scalar.props.push(this.sourceToken);
+ return;
+ case 'scalar':
+ scalar.source = this.source;
+ // block-scalar source includes trailing newline
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ yield* this.pop();
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ *blockMap(map) {
+ var _a;
+ const it = map.items[map.items.length - 1];
+ // it.sep is truthy if the pair already has a key or a ':' separator
+ switch (this.type) {
+ case 'newline':
+ this.onKeyLine = false;
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ map.items.push({ start: [this.sourceToken] });
+ }
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value)
+ map.items.push({ start: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else {
+ if (this.atIndentedComment(it.start, map.indent)) {
+ const prev = map.items[map.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ map.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ }
+ if (this.indent >= map.indent) {
+ const atNextItem = !this.onKeyLine &&
+ this.indent === map.indent &&
+ (it.sep || includesNonEmpty(it.start));
+ // For empty nodes, assign newline-separated, non-indented empty tokens to the following node
+ let start = [];
+ if (atNextItem && it.sep && !it.value) {
+ const nl = [];
+ for (let i = 0; i < it.sep.length; ++i) {
+ const st = it.sep[i];
+ switch (st.type) {
+ case 'newline':
+ nl.push(i);
+ break;
+ case 'space':
+ break;
+ case 'comment':
+ if (st.indent > map.indent)
+ nl.length = 0;
+ break;
+ default:
+ nl.length = 0;
+ }
+ }
+ if (nl.length >= 2)
+ start = it.sep.splice(nl[1]);
+ }
+ switch (this.type) {
+ case 'anchor':
+ case 'tag':
+ if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ it.sep.push(this.sourceToken);
+ }
+ else {
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'explicit-key-ind':
+ if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
+ it.start.push(this.sourceToken);
+ }
+ else if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ }
+ else {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ });
+ }
+ this.onKeyLine = true;
+ return;
+ case 'map-value-ind':
+ if (includesToken(it.start, 'explicit-key-ind')) {
+ if (!it.sep) {
+ if (includesToken(it.start, 'newline')) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else {
+ const start = getFirstKeyStartProps(it.start);
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ }
+ else if (it.value) {
+ map.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else if (isFlowToken(it.key) &&
+ !includesToken(it.sep, 'newline')) {
+ const start = getFirstKeyStartProps(it.start);
+ const key = it.key;
+ const sep = it.sep;
+ sep.push(this.sourceToken);
+ // @ts-expect-error type guard is wrong here
+ delete it.key, delete it.sep;
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key, sep }]
+ });
+ }
+ else if (start.length > 0) {
+ // Not actually at next item
+ it.sep = it.sep.concat(start, this.sourceToken);
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ else {
+ if (!it.sep) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else if (it.value || atNextItem) {
+ map.items.push({ start, key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [], key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ this.onKeyLine = true;
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (atNextItem || it.value) {
+ map.items.push({ start, key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ this.stack.push(fs);
+ }
+ else {
+ Object.assign(it, { key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ return;
+ }
+ default: {
+ const bv = this.startBlockValue(map);
+ if (bv) {
+ if (atNextItem &&
+ bv.type !== 'block-seq' &&
+ includesToken(it.start, 'explicit-key-ind')) {
+ map.items.push({ start });
+ }
+ this.stack.push(bv);
+ return;
+ }
+ }
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *blockSequence(seq) {
+ var _a;
+ const it = seq.items[seq.items.length - 1];
+ switch (this.type) {
+ case 'newline':
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ seq.items.push({ start: [this.sourceToken] });
+ }
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value)
+ seq.items.push({ start: [this.sourceToken] });
+ else {
+ if (this.atIndentedComment(it.start, seq.indent)) {
+ const prev = seq.items[seq.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ seq.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'anchor':
+ case 'tag':
+ if (it.value || this.indent <= seq.indent)
+ break;
+ it.start.push(this.sourceToken);
+ return;
+ case 'seq-item-ind':
+ if (this.indent !== seq.indent)
+ break;
+ if (it.value || includesToken(it.start, 'seq-item-ind'))
+ seq.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ }
+ if (this.indent > seq.indent) {
+ const bv = this.startBlockValue(seq);
+ if (bv) {
+ this.stack.push(bv);
+ return;
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *flowCollection(fc) {
+ const it = fc.items[fc.items.length - 1];
+ if (this.type === 'flow-error-end') {
+ let top;
+ do {
+ yield* this.pop();
+ top = this.peek(1);
+ } while (top && top.type === 'flow-collection');
+ }
+ else if (fc.end.length === 0) {
+ switch (this.type) {
+ case 'comma':
+ case 'explicit-key-ind':
+ if (!it || it.sep)
+ fc.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'map-value-ind':
+ if (!it || it.value)
+ fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ return;
+ case 'space':
+ case 'comment':
+ case 'newline':
+ case 'anchor':
+ case 'tag':
+ if (!it || it.value)
+ fc.items.push({ start: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (!it || it.value)
+ fc.items.push({ start: [], key: fs, sep: [] });
+ else if (it.sep)
+ this.stack.push(fs);
+ else
+ Object.assign(it, { key: fs, sep: [] });
+ return;
+ }
+ case 'flow-map-end':
+ case 'flow-seq-end':
+ fc.end.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(fc);
+ /* istanbul ignore else should not happen */
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ else {
+ const parent = this.peek(2);
+ if (parent.type === 'block-map' &&
+ ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
+ (this.type === 'newline' &&
+ !parent.items[parent.items.length - 1].sep))) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else if (this.type === 'map-value-ind' &&
+ parent.type !== 'flow-collection') {
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ fixFlowSeqItems(fc);
+ const sep = fc.end.splice(1, fc.end.length);
+ sep.push(this.sourceToken);
+ const map = {
+ type: 'block-map',
+ offset: fc.offset,
+ indent: fc.indent,
+ items: [{ start, key: fc, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else {
+ yield* this.lineEnd(fc);
+ }
+ }
+ }
+ flowScalar(type) {
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ return {
+ type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ }
+ startBlockValue(parent) {
+ switch (this.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return this.flowScalar(this.type);
+ case 'block-scalar-header':
+ return {
+ type: 'block-scalar',
+ offset: this.offset,
+ indent: this.indent,
+ props: [this.sourceToken],
+ source: ''
+ };
+ case 'flow-map-start':
+ case 'flow-seq-start':
+ return {
+ type: 'flow-collection',
+ offset: this.offset,
+ indent: this.indent,
+ start: this.sourceToken,
+ items: [],
+ end: []
+ };
+ case 'seq-item-ind':
+ return {
+ type: 'block-seq',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ };
+ case 'explicit-key-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ start.push(this.sourceToken);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start }]
+ };
+ }
+ case 'map-value-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ };
+ }
+ }
+ return null;
+ }
+ atIndentedComment(start, indent) {
+ if (this.type !== 'comment')
+ return false;
+ if (this.indent <= indent)
+ return false;
+ return start.every(st => st.type === 'newline' || st.type === 'space');
+ }
+ *documentEnd(docEnd) {
+ if (this.type !== 'doc-mode') {
+ if (docEnd.end)
+ docEnd.end.push(this.sourceToken);
+ else
+ docEnd.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+ *lineEnd(token) {
+ switch (this.type) {
+ case 'comma':
+ case 'doc-start':
+ case 'doc-end':
+ case 'flow-seq-end':
+ case 'flow-map-end':
+ case 'map-value-ind':
+ yield* this.pop();
+ yield* this.step();
+ break;
+ case 'newline':
+ this.onKeyLine = false;
+ // fallthrough
+ case 'space':
+ case 'comment':
+ default:
+ // all other values are errors
+ if (token.end)
+ token.end.push(this.sourceToken);
+ else
+ token.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+}
+
+export { Parser };
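The `incomplete` flag documented on `parse()` above allows streaming input; a short sketch (relative import assumed) that feeds the parser arbitrary chunks and lets the final call flush the buffered line:

```js
import { Parser } from './parser.js';

const parser = new Parser();
const chunks = ['fo', 'o: bar\nbaz:', ' 42\n'];
const tokens = [];
for (let i = 0; i < chunks.length; ++i) {
  // All calls except the last are marked incomplete, so partial lines are buffered.
  const incomplete = i < chunks.length - 1;
  for (const token of parser.parse(chunks[i], incomplete)) tokens.push(token);
}
console.log(tokens.map(t => t.type)); // [ 'document' ] for this two-key input
```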
diff --git a/node_modules/yaml/browser/dist/public-api.js b/node_modules/yaml/browser/dist/public-api.js
new file mode 100644
index 0000000..56f5482
--- /dev/null
+++ b/node_modules/yaml/browser/dist/public-api.js
@@ -0,0 +1,100 @@
+import { Composer } from './compose/composer.js';
+import { Document } from './doc/Document.js';
+import { prettifyError, YAMLParseError } from './errors.js';
+import { warn } from './log.js';
+import { LineCounter } from './parse/line-counter.js';
+import { Parser } from './parse/parser.js';
+
+function parseOptions(options) {
+ const prettyErrors = options.prettyErrors !== false;
+ const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null;
+ return { lineCounter, prettyErrors };
+}
+/**
+ * Parse the input as a stream of YAML documents.
+ *
+ * Documents should be separated from each other by `...` or `---` marker lines.
+ *
+ * @returns If an empty `docs` array is returned, it will be of type
+ * EmptyStream and contain additional stream information. In
+ * TypeScript, you should use `'empty' in docs` as a type guard for it.
+ */
+function parseAllDocuments(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser = new Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer = new Composer(options);
+ const docs = Array.from(composer.compose(parser.parse(source)));
+ if (prettyErrors && lineCounter)
+ for (const doc of docs) {
+ doc.errors.forEach(prettifyError(source, lineCounter));
+ doc.warnings.forEach(prettifyError(source, lineCounter));
+ }
+ if (docs.length > 0)
+ return docs;
+ return Object.assign([], { empty: true }, composer.streamInfo());
+}
+/** Parse an input string into a single YAML.Document */
+function parseDocument(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser = new Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer = new Composer(options);
+ // `doc` is always set by compose.end(true) at the very latest
+ let doc = null;
+ for (const _doc of composer.compose(parser.parse(source), true, source.length)) {
+ if (!doc)
+ doc = _doc;
+ else if (doc.options.logLevel !== 'silent') {
+ doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
+ break;
+ }
+ }
+ if (prettyErrors && lineCounter) {
+ doc.errors.forEach(prettifyError(source, lineCounter));
+ doc.warnings.forEach(prettifyError(source, lineCounter));
+ }
+ return doc;
+}
+function parse(src, reviver, options) {
+ let _reviver = undefined;
+ if (typeof reviver === 'function') {
+ _reviver = reviver;
+ }
+ else if (options === undefined && reviver && typeof reviver === 'object') {
+ options = reviver;
+ }
+ const doc = parseDocument(src, options);
+ if (!doc)
+ return null;
+ doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
+ if (doc.errors.length > 0) {
+ if (doc.options.logLevel !== 'silent')
+ throw doc.errors[0];
+ else
+ doc.errors = [];
+ }
+ return doc.toJS(Object.assign({ reviver: _reviver }, options));
+}
+function stringify(value, replacer, options) {
+ var _a;
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ }
+ if (typeof options === 'string')
+ options = options.length;
+ if (typeof options === 'number') {
+ const indent = Math.round(options);
+ options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
+ }
+ if (value === undefined) {
+ const { keepUndefined } = (_a = options !== null && options !== void 0 ? options : replacer) !== null && _a !== void 0 ? _a : {};
+ if (!keepUndefined)
+ return undefined;
+ }
+ return new Document(value, _replacer, options).toString(options);
+}
+
+export { parse, parseAllDocuments, parseDocument, stringify };
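A minimal sketch of the four functions exported above, including the `'empty' in docs` guard that the `parseAllDocuments()` doc comment recommends; in application code the same functions are normally imported from the package root rather than this relative path:

```js
import { parse, parseDocument, parseAllDocuments, stringify } from './public-api.js';

console.log(parse('n: 42\nlist: [a, b]\n')); // { n: 42, list: [ 'a', 'b' ] }
console.log(stringify({ n: 42 }));           // -> 'n: 42\n'

const doc = parseDocument('n: 42 # answer\n');
console.log(doc.toString());                 // the comment survives a round-trip

const docs = parseAllDocuments('---\na\n---\nb\n');
if (!('empty' in docs))
  console.log(docs.map(d => d.toJS()));      // [ 'a', 'b' ]
```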
diff --git a/node_modules/yaml/browser/dist/schema/Schema.js b/node_modules/yaml/browser/dist/schema/Schema.js
new file mode 100644
index 0000000..f2b8fb8
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/Schema.js
@@ -0,0 +1,38 @@
+import { MAP, SCALAR, SEQ } from '../nodes/Node.js';
+import { map } from './common/map.js';
+import { seq } from './common/seq.js';
+import { string } from './common/string.js';
+import { getTags, coreKnownTags } from './tags.js';
+
+const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
+class Schema {
+ constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
+ this.compat = Array.isArray(compat)
+ ? getTags(compat, 'compat')
+ : compat
+ ? getTags(null, compat)
+ : null;
+ this.merge = !!merge;
+ this.name = (typeof schema === 'string' && schema) || 'core';
+ this.knownTags = resolveKnownTags ? coreKnownTags : {};
+ this.tags = getTags(customTags, this.name);
+ this.toStringOptions = toStringDefaults !== null && toStringDefaults !== void 0 ? toStringDefaults : null;
+ Object.defineProperty(this, MAP, { value: map });
+ Object.defineProperty(this, SCALAR, { value: string });
+ Object.defineProperty(this, SEQ, { value: seq });
+ // Used by createMap()
+ this.sortMapEntries =
+ typeof sortMapEntries === 'function'
+ ? sortMapEntries
+ : sortMapEntries === true
+ ? sortMapEntriesByKey
+ : null;
+ }
+ clone() {
+ const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
+ copy.tags = this.tags.slice();
+ return copy;
+ }
+}
+
+export { Schema };
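The Schema constructor above is normally reached through document options rather than instantiated directly; a small sketch of the `sortMapEntries` option it normalizes, going through the public API (relative path assumed):

```js
import { stringify } from '../public-api.js';

// sortMapEntries: true selects the sortMapEntriesByKey comparator defined above.
console.log(stringify({ b: 2, a: 1 }, { sortMapEntries: true }));
// a: 1
// b: 2
```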
diff --git a/node_modules/yaml/browser/dist/schema/common/map.js b/node_modules/yaml/browser/dist/schema/common/map.js
new file mode 100644
index 0000000..133d861
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/common/map.js
@@ -0,0 +1,42 @@
+import { isMap } from '../../nodes/Node.js';
+import { createPair } from '../../nodes/Pair.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+
+function createMap(schema, obj, ctx) {
+ const { keepUndefined, replacer } = ctx;
+ const map = new YAMLMap(schema);
+ const add = (key, value) => {
+ if (typeof replacer === 'function')
+ value = replacer.call(obj, key, value);
+ else if (Array.isArray(replacer) && !replacer.includes(key))
+ return;
+ if (value !== undefined || keepUndefined)
+ map.items.push(createPair(key, value, ctx));
+ };
+ if (obj instanceof Map) {
+ for (const [key, value] of obj)
+ add(key, value);
+ }
+ else if (obj && typeof obj === 'object') {
+ for (const key of Object.keys(obj))
+ add(key, obj[key]);
+ }
+ if (typeof schema.sortMapEntries === 'function') {
+ map.items.sort(schema.sortMapEntries);
+ }
+ return map;
+}
+const map = {
+ collection: 'map',
+ createNode: createMap,
+ default: true,
+ nodeClass: YAMLMap,
+ tag: 'tag:yaml.org,2002:map',
+ resolve(map, onError) {
+ if (!isMap(map))
+ onError('Expected a mapping for this tag');
+ return map;
+ }
+};
+
+export { map };
diff --git a/node_modules/yaml/browser/dist/schema/common/null.js b/node_modules/yaml/browser/dist/schema/common/null.js
new file mode 100644
index 0000000..fcbe1b7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/common/null.js
@@ -0,0 +1,15 @@
+import { Scalar } from '../../nodes/Scalar.js';
+
+const nullTag = {
+ identify: value => value == null,
+ createNode: () => new Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^(?:~|[Nn]ull|NULL)?$/,
+ resolve: () => new Scalar(null),
+ stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
+ ? source
+ : ctx.options.nullStr
+};
+
+export { nullTag };
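A quick sketch (relative path assumed) showing that every form matched by the `test` pattern above, including a missing value, resolves to `null`:

```js
import { parse } from '../../public-api.js';

console.log(parse('a: ~\nb: null\nc: NULL\nd:\n'));
// { a: null, b: null, c: null, d: null }
```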
diff --git a/node_modules/yaml/browser/dist/schema/common/seq.js b/node_modules/yaml/browser/dist/schema/common/seq.js
new file mode 100644
index 0000000..2aa7639
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/common/seq.js
@@ -0,0 +1,33 @@
+import { createNode } from '../../doc/createNode.js';
+import { isSeq } from '../../nodes/Node.js';
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+
+function createSeq(schema, obj, ctx) {
+ const { replacer } = ctx;
+ const seq = new YAMLSeq(schema);
+ if (obj && Symbol.iterator in Object(obj)) {
+ let i = 0;
+ for (let it of obj) {
+ if (typeof replacer === 'function') {
+ const key = obj instanceof Set ? it : String(i++);
+ it = replacer.call(obj, key, it);
+ }
+ seq.items.push(createNode(it, undefined, ctx));
+ }
+ }
+ return seq;
+}
+const seq = {
+ collection: 'seq',
+ createNode: createSeq,
+ default: true,
+ nodeClass: YAMLSeq,
+ tag: 'tag:yaml.org,2002:seq',
+ resolve(seq, onError) {
+ if (!isSeq(seq))
+ onError('Expected a sequence for this tag');
+ return seq;
+ }
+};
+
+export { seq };
diff --git a/node_modules/yaml/browser/dist/schema/common/string.js b/node_modules/yaml/browser/dist/schema/common/string.js
new file mode 100644
index 0000000..a064f7b
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/common/string.js
@@ -0,0 +1,14 @@
+import { stringifyString } from '../../stringify/stringifyString.js';
+
+const string = {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify(item, ctx, onComment, onChompKeep) {
+ ctx = Object.assign({ actualString: true }, ctx);
+ return stringifyString(item, ctx, onComment, onChompKeep);
+ }
+};
+
+export { string };
diff --git a/node_modules/yaml/browser/dist/schema/core/bool.js b/node_modules/yaml/browser/dist/schema/core/bool.js
new file mode 100644
index 0000000..ab3c943
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/core/bool.js
@@ -0,0 +1,19 @@
+import { Scalar } from '../../nodes/Scalar.js';
+
+const boolTag = {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
+ resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
+ stringify({ source, value }, ctx) {
+ if (source && boolTag.test.test(source)) {
+ const sv = source[0] === 't' || source[0] === 'T';
+ if (value === sv)
+ return source;
+ }
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+ }
+};
+
+export { boolTag };
diff --git a/node_modules/yaml/browser/dist/schema/core/float.js b/node_modules/yaml/browser/dist/schema/core/float.js
new file mode 100644
index 0000000..a632cb7
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/core/float.js
@@ -0,0 +1,43 @@
+import { Scalar } from '../../nodes/Scalar.js';
+import { stringifyNumber } from '../../stringify/stringifyNumber.js';
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
+ resolve: str => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
+ resolve: str => parseFloat(str),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
+ resolve(str) {
+ const node = new Scalar(parseFloat(str));
+ const dot = str.indexOf('.');
+ if (dot !== -1 && str[str.length - 1] === '0')
+ node.minFractionDigits = str.length - dot - 1;
+ return node;
+ },
+ stringify: stringifyNumber
+};
+
+export { float, floatExp, floatNaN };
diff --git a/node_modules/yaml/browser/dist/schema/core/int.js b/node_modules/yaml/browser/dist/schema/core/int.js
new file mode 100644
index 0000000..7091235
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/core/int.js
@@ -0,0 +1,38 @@
+import { stringifyNumber } from '../../stringify/stringifyNumber.js';
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value) && value >= 0)
+ return prefix + value.toString(radix);
+ return stringifyNumber(node);
+}
+const intOct = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^0o[0-7]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),
+ stringify: node => intStringify(node, 8, '0o')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber
+};
+const intHex = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^0x[0-9a-fA-F]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+export { int, intHex, intOct };
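A sketch of the three core-schema integer forms above together with the `intAsBigInt` option consulted by `intResolve()` (relative path assumed):

```js
import { parse } from '../../public-api.js';

console.log(parse('[42, 0o17, 0x2A]'));                   // [ 42, 15, 42 ]
console.log(parse('big: 42', { intAsBigInt: true }).big); // 42n
```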
diff --git a/node_modules/yaml/browser/dist/schema/core/schema.js b/node_modules/yaml/browser/dist/schema/core/schema.js
new file mode 100644
index 0000000..dd02b2e
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/core/schema.js
@@ -0,0 +1,23 @@
+import { map } from '../common/map.js';
+import { nullTag } from '../common/null.js';
+import { seq } from '../common/seq.js';
+import { string } from '../common/string.js';
+import { boolTag } from './bool.js';
+import { floatNaN, floatExp, float } from './float.js';
+import { intOct, int, intHex } from './int.js';
+
+const schema = [
+ map,
+ seq,
+ string,
+ nullTag,
+ boolTag,
+ intOct,
+ int,
+ intHex,
+ floatNaN,
+ floatExp,
+ float
+];
+
+export { schema };
diff --git a/node_modules/yaml/browser/dist/schema/json/schema.js b/node_modules/yaml/browser/dist/schema/json/schema.js
new file mode 100644
index 0000000..16d75ce
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/json/schema.js
@@ -0,0 +1,62 @@
+import { Scalar } from '../../nodes/Scalar.js';
+import { map } from '../common/map.js';
+import { seq } from '../common/seq.js';
+
+function intIdentify(value) {
+ return typeof value === 'bigint' || Number.isInteger(value);
+}
+const stringifyJSON = ({ value }) => JSON.stringify(value);
+const jsonScalars = [
+ {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => value == null,
+ createNode: () => new Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^null$/,
+ resolve: () => null,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^true|false$/,
+ resolve: str => str === 'true',
+ stringify: stringifyJSON
+ },
+ {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^-?(?:0|[1-9][0-9]*)$/,
+ resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
+ stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value)
+ },
+ {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
+ resolve: str => parseFloat(str),
+ stringify: stringifyJSON
+ }
+];
+const jsonError = {
+ default: true,
+ tag: '',
+ test: /^/,
+ resolve(str, onError) {
+ onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
+ return str;
+ }
+};
+const schema = [map, seq].concat(jsonScalars, jsonError);
+
+export { schema };
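With the JSON schema above, only JSON-compatible scalars resolve; anything else falls through to the `jsonError` tag. A small sketch, with the relative import assumed:

```js
import { parse } from '../../public-api.js';

console.log(parse('[1, 2.5, true, null, "text"]', { schema: 'json' }));
// [ 1, 2.5, true, null, 'text' ]

// An unquoted plain scalar such as `a: yes` would instead be reported by
// jsonError as "Unresolved plain scalar", and parse() would throw that error.
```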
diff --git a/node_modules/yaml/browser/dist/schema/tags.js b/node_modules/yaml/browser/dist/schema/tags.js
new file mode 100644
index 0000000..f67e3e0
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/tags.js
@@ -0,0 +1,83 @@
+import { map } from './common/map.js';
+import { nullTag } from './common/null.js';
+import { seq } from './common/seq.js';
+import { string } from './common/string.js';
+import { boolTag } from './core/bool.js';
+import { float, floatExp, floatNaN } from './core/float.js';
+import { int, intHex, intOct } from './core/int.js';
+import { schema } from './core/schema.js';
+import { schema as schema$1 } from './json/schema.js';
+import { binary } from './yaml-1.1/binary.js';
+import { omap } from './yaml-1.1/omap.js';
+import { pairs } from './yaml-1.1/pairs.js';
+import { schema as schema$2 } from './yaml-1.1/schema.js';
+import { set } from './yaml-1.1/set.js';
+import { floatTime, intTime, timestamp } from './yaml-1.1/timestamp.js';
+
+const schemas = new Map([
+ ['core', schema],
+ ['failsafe', [map, seq, string]],
+ ['json', schema$1],
+ ['yaml11', schema$2],
+ ['yaml-1.1', schema$2]
+]);
+const tagsByName = {
+ binary,
+ bool: boolTag,
+ float,
+ floatExp,
+ floatNaN,
+ floatTime,
+ int,
+ intHex,
+ intOct,
+ intTime,
+ map,
+ null: nullTag,
+ omap,
+ pairs,
+ seq,
+ set,
+ timestamp
+};
+const coreKnownTags = {
+ 'tag:yaml.org,2002:binary': binary,
+ 'tag:yaml.org,2002:omap': omap,
+ 'tag:yaml.org,2002:pairs': pairs,
+ 'tag:yaml.org,2002:set': set,
+ 'tag:yaml.org,2002:timestamp': timestamp
+};
+function getTags(customTags, schemaName) {
+ let tags = schemas.get(schemaName);
+ if (!tags) {
+ if (Array.isArray(customTags))
+ tags = [];
+ else {
+ const keys = Array.from(schemas.keys())
+ .filter(key => key !== 'yaml11')
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
+ }
+ }
+ if (Array.isArray(customTags)) {
+ for (const tag of customTags)
+ tags = tags.concat(tag);
+ }
+ else if (typeof customTags === 'function') {
+ tags = customTags(tags.slice());
+ }
+ return tags.map(tag => {
+ if (typeof tag !== 'string')
+ return tag;
+ const tagObj = tagsByName[tag];
+ if (tagObj)
+ return tagObj;
+ const keys = Object.keys(tagsByName)
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
+ });
+}
+
+export { coreKnownTags, getTags };
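`getTags()` above lets `customTags` entries be given by name; a sketch (relative path assumed) that layers the yaml-1.1 `timestamp` tag on top of the default core schema:

```js
import { parse } from '../public-api.js';

const date = parse('2001-12-14', { customTags: ['timestamp'] });
console.log(date instanceof Date, date.toISOString());
// true 2001-12-14T00:00:00.000Z
```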
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js
new file mode 100644
index 0000000..a700819
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js
@@ -0,0 +1,66 @@
+import { Scalar } from '../../nodes/Scalar.js';
+import { stringifyString } from '../../stringify/stringifyString.js';
+
+const binary = {
+ identify: value => value instanceof Uint8Array,
+ default: false,
+ tag: 'tag:yaml.org,2002:binary',
+ /**
+ * Returns a Buffer in Node.js and a Uint8Array in browsers
+ *
+ * To use the resulting buffer as an image, you'll want to do something like:
+ *
+ * const blob = new Blob([buffer], { type: 'image/jpeg' })
+ * document.querySelector('#photo').src = URL.createObjectURL(blob)
+ */
+ resolve(src, onError) {
+ if (typeof Buffer === 'function') {
+ return Buffer.from(src, 'base64');
+ }
+ else if (typeof atob === 'function') {
+ // On IE 11, atob() can't handle newlines
+ const str = atob(src.replace(/[\n\r]/g, ''));
+ const buffer = new Uint8Array(str.length);
+ for (let i = 0; i < str.length; ++i)
+ buffer[i] = str.charCodeAt(i);
+ return buffer;
+ }
+ else {
+ onError('This environment does not support reading binary tags; either Buffer or atob is required');
+ return src;
+ }
+ },
+ stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const buf = value; // checked earlier by binary.identify()
+ let str;
+ if (typeof Buffer === 'function') {
+ str =
+ buf instanceof Buffer
+ ? buf.toString('base64')
+ : Buffer.from(buf.buffer).toString('base64');
+ }
+ else if (typeof btoa === 'function') {
+ let s = '';
+ for (let i = 0; i < buf.length; ++i)
+ s += String.fromCharCode(buf[i]);
+ str = btoa(s);
+ }
+ else {
+ throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
+ }
+ if (!type)
+ type = Scalar.BLOCK_LITERAL;
+ if (type !== Scalar.QUOTE_DOUBLE) {
+ const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
+ const n = Math.ceil(str.length / lineWidth);
+ const lines = new Array(n);
+ for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
+ lines[i] = str.substr(o, lineWidth);
+ }
+ str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
+ }
+ return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
+ }
+};
+
+export { binary };
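A sketch of resolving an explicitly tagged `!!binary` scalar with the tag above; per its doc comment the result is a Buffer in Node.js and a Uint8Array in browsers. This assumes the relative path and the default behaviour of resolving known tags in the core schema:

```js
import { parse } from '../../public-api.js';

const data = parse('!!binary SGVsbG8=');
console.log(data instanceof Uint8Array, new TextDecoder().decode(data)); // true Hello
```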
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js
new file mode 100644
index 0000000..1ced791
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js
@@ -0,0 +1,26 @@
+import { Scalar } from '../../nodes/Scalar.js';
+
+function boolStringify({ value, source }, ctx) {
+ const boolObj = value ? trueTag : falseTag;
+ if (source && boolObj.test.test(source))
+ return source;
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+}
+const trueTag = {
+ identify: value => value === true,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
+ resolve: () => new Scalar(true),
+ stringify: boolStringify
+};
+const falseTag = {
+ identify: value => value === false,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
+ resolve: () => new Scalar(false),
+ stringify: boolStringify
+};
+
+export { falseTag, trueTag };
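A sketch (relative path assumed) of the extra YAML 1.1 boolean forms matched by the patterns above, which the 1.2 core schema leaves as plain strings:

```js
import { parse } from '../../public-api.js';

console.log(parse('[yes, on, True, no, OFF]', { schema: 'yaml-1.1' }));
// [ true, true, true, false, false ]
console.log(parse('[yes, no]')); // default core schema: [ 'yes', 'no' ]
```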
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js
new file mode 100644
index 0000000..9097266
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js
@@ -0,0 +1,46 @@
+import { Scalar } from '../../nodes/Scalar.js';
+import { stringifyNumber } from '../../stringify/stringifyNumber.js';
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
+ resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
+ resolve: (str) => parseFloat(str.replace(/_/g, '')),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
+ resolve(str) {
+ const node = new Scalar(parseFloat(str.replace(/_/g, '')));
+ const dot = str.indexOf('.');
+ if (dot !== -1) {
+ const f = str.substring(dot + 1).replace(/_/g, '');
+ if (f[f.length - 1] === '0')
+ node.minFractionDigits = f.length;
+ }
+ return node;
+ },
+ stringify: stringifyNumber
+};
+
+export { float, floatExp, floatNaN };
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js
new file mode 100644
index 0000000..f572823
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js
@@ -0,0 +1,71 @@
+import { stringifyNumber } from '../../stringify/stringifyNumber.js';
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+function intResolve(str, offset, radix, { intAsBigInt }) {
+ const sign = str[0];
+ if (sign === '-' || sign === '+')
+ offset += 1;
+ str = str.substring(offset).replace(/_/g, '');
+ if (intAsBigInt) {
+ switch (radix) {
+ case 2:
+ str = `0b${str}`;
+ break;
+ case 8:
+ str = `0o${str}`;
+ break;
+ case 16:
+ str = `0x${str}`;
+ break;
+ }
+ const n = BigInt(str);
+ return sign === '-' ? BigInt(-1) * n : n;
+ }
+ const n = parseInt(str, radix);
+ return sign === '-' ? -1 * n : n;
+}
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value)) {
+ const str = value.toString(radix);
+ return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
+ }
+ return stringifyNumber(node);
+}
+const intBin = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'BIN',
+ test: /^[-+]?0b[0-1_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
+ stringify: node => intStringify(node, 2, '0b')
+};
+const intOct = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^[-+]?0[0-7_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
+ stringify: node => intStringify(node, 8, '0')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9][0-9_]*$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber
+};
+const intHex = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^[-+]?0x[0-9a-fA-F_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+export { int, intBin, intHex, intOct };
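A sketch (relative path assumed) of the YAML 1.1 integer forms above, including underscores and the binary and leading-zero octal prefixes that differ from the 1.2 core schema:

```js
import { parse } from '../../public-api.js';

console.log(parse('[0b1100_1001, 0755, 1_000, -0x1F]', { schema: 'yaml-1.1' }));
// [ 201, 493, 1000, -31 ]
```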
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js
new file mode 100644
index 0000000..71e2527
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js
@@ -0,0 +1,73 @@
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+import { toJS } from '../../nodes/toJS.js';
+import { isScalar, isPair } from '../../nodes/Node.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+import { resolvePairs, createPairs } from './pairs.js';
+
+class YAMLOMap extends YAMLSeq {
+ constructor() {
+ super();
+ this.add = YAMLMap.prototype.add.bind(this);
+ this.delete = YAMLMap.prototype.delete.bind(this);
+ this.get = YAMLMap.prototype.get.bind(this);
+ this.has = YAMLMap.prototype.has.bind(this);
+ this.set = YAMLMap.prototype.set.bind(this);
+ this.tag = YAMLOMap.tag;
+ }
+ /**
+ * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
+ * but TypeScript won't allow widening the signature of a child method.
+ */
+ toJSON(_, ctx) {
+ if (!ctx)
+ return super.toJSON(_);
+ const map = new Map();
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const pair of this.items) {
+ let key, value;
+ if (isPair(pair)) {
+ key = toJS(pair.key, '', ctx);
+ value = toJS(pair.value, key, ctx);
+ }
+ else {
+ key = toJS(pair, '', ctx);
+ }
+ if (map.has(key))
+ throw new Error('Ordered maps must not include duplicate keys');
+ map.set(key, value);
+ }
+ return map;
+ }
+}
+YAMLOMap.tag = 'tag:yaml.org,2002:omap';
+const omap = {
+ collection: 'seq',
+ identify: value => value instanceof Map,
+ nodeClass: YAMLOMap,
+ default: false,
+ tag: 'tag:yaml.org,2002:omap',
+ resolve(seq, onError) {
+ const pairs = resolvePairs(seq, onError);
+ const seenKeys = [];
+ for (const { key } of pairs.items) {
+ if (isScalar(key)) {
+ if (seenKeys.includes(key.value)) {
+ onError(`Ordered maps must not include duplicate keys: ${key.value}`);
+ }
+ else {
+ seenKeys.push(key.value);
+ }
+ }
+ }
+ return Object.assign(new YAMLOMap(), pairs);
+ },
+ createNode(schema, iterable, ctx) {
+ const pairs = createPairs(schema, iterable, ctx);
+ const omap = new YAMLOMap();
+ omap.items = pairs.items;
+ return omap;
+ }
+};
+
+export { YAMLOMap, omap };
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js
new file mode 100644
index 0000000..5c02478
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js
@@ -0,0 +1,78 @@
+import { isSeq, isPair, isMap } from '../../nodes/Node.js';
+import { Pair, createPair } from '../../nodes/Pair.js';
+import { Scalar } from '../../nodes/Scalar.js';
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+
+function resolvePairs(seq, onError) {
+ var _a;
+ if (isSeq(seq)) {
+ for (let i = 0; i < seq.items.length; ++i) {
+ let item = seq.items[i];
+ if (isPair(item))
+ continue;
+ else if (isMap(item)) {
+ if (item.items.length > 1)
+ onError('Each pair must have its own sequence indicator');
+ const pair = item.items[0] || new Pair(new Scalar(null));
+ if (item.commentBefore)
+ pair.key.commentBefore = pair.key.commentBefore
+ ? `${item.commentBefore}\n${pair.key.commentBefore}`
+ : item.commentBefore;
+ if (item.comment) {
+ const cn = (_a = pair.value) !== null && _a !== void 0 ? _a : pair.key;
+ cn.comment = cn.comment
+ ? `${item.comment}\n${cn.comment}`
+ : item.comment;
+ }
+ item = pair;
+ }
+ seq.items[i] = isPair(item) ? item : new Pair(item);
+ }
+ }
+ else
+ onError('Expected a sequence for this tag');
+ return seq;
+}
+function createPairs(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const pairs = new YAMLSeq(schema);
+ pairs.tag = 'tag:yaml.org,2002:pairs';
+ let i = 0;
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let it of iterable) {
+ if (typeof replacer === 'function')
+ it = replacer.call(iterable, String(i++), it);
+ let key, value;
+ if (Array.isArray(it)) {
+ if (it.length === 2) {
+ key = it[0];
+ value = it[1];
+ }
+ else
+ throw new TypeError(`Expected [key, value] tuple: ${it}`);
+ }
+ else if (it && it instanceof Object) {
+ const keys = Object.keys(it);
+ if (keys.length === 1) {
+ key = keys[0];
+ value = it[key];
+ }
+ else
+ throw new TypeError(`Expected { key: value } tuple: ${it}`);
+ }
+ else {
+ key = it;
+ }
+ pairs.items.push(createPair(key, value, ctx));
+ }
+ return pairs;
+}
+const pairs = {
+ collection: 'seq',
+ default: false,
+ tag: 'tag:yaml.org,2002:pairs',
+ resolve: resolvePairs,
+ createNode: createPairs
+};
+
+export { createPairs, pairs, resolvePairs };
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js
new file mode 100644
index 0000000..dc5be5f
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js
@@ -0,0 +1,37 @@
+import { map } from '../common/map.js';
+import { nullTag } from '../common/null.js';
+import { seq } from '../common/seq.js';
+import { string } from '../common/string.js';
+import { binary } from './binary.js';
+import { trueTag, falseTag } from './bool.js';
+import { floatNaN, floatExp, float } from './float.js';
+import { intBin, intOct, int, intHex } from './int.js';
+import { omap } from './omap.js';
+import { pairs } from './pairs.js';
+import { set } from './set.js';
+import { intTime, floatTime, timestamp } from './timestamp.js';
+
+const schema = [
+ map,
+ seq,
+ string,
+ nullTag,
+ trueTag,
+ falseTag,
+ intBin,
+ intOct,
+ int,
+ intHex,
+ floatNaN,
+ floatExp,
+ float,
+ binary,
+ omap,
+ pairs,
+ set,
+ intTime,
+ floatTime,
+ timestamp
+];
+
+export { schema };
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js
new file mode 100644
index 0000000..bee6ac6
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js
@@ -0,0 +1,87 @@
+import { isMap, isPair, isScalar } from '../../nodes/Node.js';
+import { createPair, Pair } from '../../nodes/Pair.js';
+import { YAMLMap, findPair } from '../../nodes/YAMLMap.js';
+
+class YAMLSet extends YAMLMap {
+ constructor(schema) {
+ super(schema);
+ this.tag = YAMLSet.tag;
+ }
+ add(key) {
+ let pair;
+ if (isPair(key))
+ pair = key;
+ else if (typeof key === 'object' &&
+ 'key' in key &&
+ 'value' in key &&
+ key.value === null)
+ pair = new Pair(key.key, null);
+ else
+ pair = new Pair(key, null);
+ const prev = findPair(this.items, pair.key);
+ if (!prev)
+ this.items.push(pair);
+ }
+ get(key, keepPair) {
+ const pair = findPair(this.items, key);
+ return !keepPair && isPair(pair)
+ ? isScalar(pair.key)
+ ? pair.key.value
+ : pair.key
+ : pair;
+ }
+ set(key, value) {
+ if (typeof value !== 'boolean')
+ throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
+ const prev = findPair(this.items, key);
+ if (prev && !value) {
+ this.items.splice(this.items.indexOf(prev), 1);
+ }
+ else if (!prev && value) {
+ this.items.push(new Pair(key));
+ }
+ }
+ toJSON(_, ctx) {
+ return super.toJSON(_, ctx, Set);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ if (this.hasAllNullValues(true))
+ return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
+ else
+ throw new Error('Set items must all have null values');
+ }
+}
+YAMLSet.tag = 'tag:yaml.org,2002:set';
+const set = {
+ collection: 'map',
+ identify: value => value instanceof Set,
+ nodeClass: YAMLSet,
+ default: false,
+ tag: 'tag:yaml.org,2002:set',
+ resolve(map, onError) {
+ if (isMap(map)) {
+ if (map.hasAllNullValues(true))
+ return Object.assign(new YAMLSet(), map);
+ else
+ onError('Set items must all have null values');
+ }
+ else
+ onError('Expected a mapping for this tag');
+ return map;
+ },
+ createNode(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const set = new YAMLSet(schema);
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let value of iterable) {
+ if (typeof replacer === 'function')
+ value = replacer.call(iterable, value, value);
+ set.items.push(createPair(value, null, ctx));
+ }
+ return set;
+ }
+};
+
+export { YAMLSet, set };
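A sketch (relative path assumed) of an explicitly tagged `!!set` resolving to a JS Set via the tag above; all values must be null, as enforced by `resolve()`:

```js
import { parse } from '../../public-api.js';

console.log(parse('!!set { a, b, c }', { schema: 'yaml-1.1' }));
// Set(3) { 'a', 'b', 'c' }
```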
diff --git a/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js b/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js
new file mode 100644
index 0000000..7013cda
--- /dev/null
+++ b/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js
@@ -0,0 +1,101 @@
+import { stringifyNumber } from '../../stringify/stringifyNumber.js';
+
+/** Internal types handle bigint as number, because TS can't figure it out. */
+function parseSexagesimal(str, asBigInt) {
+ const sign = str[0];
+ const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
+ const num = (n) => asBigInt ? BigInt(n) : Number(n);
+ const res = parts
+ .replace(/_/g, '')
+ .split(':')
+ .reduce((res, p) => res * num(60) + num(p), num(0));
+ return (sign === '-' ? num(-1) * res : res);
+}
+/**
+ * hhhh:mm:ss.sss
+ *
+ * Internal types handle bigint as number, because TS can't figure it out.
+ */
+function stringifySexagesimal(node) {
+ let { value } = node;
+ let num = (n) => n;
+ if (typeof value === 'bigint')
+ num = n => BigInt(n);
+ else if (isNaN(value) || !isFinite(value))
+ return stringifyNumber(node);
+ let sign = '';
+ if (value < 0) {
+ sign = '-';
+ value *= num(-1);
+ }
+ const _60 = num(60);
+ const parts = [value % _60]; // seconds, including ms
+ if (value < 60) {
+ parts.unshift(0); // at least one : is required
+ }
+ else {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value % _60); // minutes
+ if (value >= 60) {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value); // hours
+ }
+ }
+ return (sign +
+ parts
+ .map(n => (n < 10 ? '0' + String(n) : String(n)))
+ .join(':')
+ .replace(/000000\d*$/, '') // % 60 may introduce error
+ );
+}
+const intTime = {
+ identify: value => typeof value === 'bigint' || Number.isInteger(value),
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
+ resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
+ stringify: stringifySexagesimal
+};
+const floatTime = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
+ resolve: str => parseSexagesimal(str, false),
+ stringify: stringifySexagesimal
+};
+const timestamp = {
+ identify: value => value instanceof Date,
+ default: true,
+ tag: 'tag:yaml.org,2002:timestamp',
+ // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
+ // may be omitted altogether, resulting in a date format. In such a case, the time part is
+ // assumed to be 00:00:00Z (start of day, UTC).
+ test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
+ '(?:' + // time is optional
+ '(?:t|T|[ \\t]+)' + // t | T | whitespace
+ '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
+ '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
+ ')?$'),
+ resolve(str) {
+ const match = str.match(timestamp.test);
+ if (!match)
+ throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
+ const [, year, month, day, hour, minute, second] = match.map(Number);
+ const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
+ let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
+ const tz = match[8];
+ if (tz && tz !== 'Z') {
+ let d = parseSexagesimal(tz, false);
+ if (Math.abs(d) < 30)
+ d *= 60;
+ date -= 60000 * d;
+ }
+ return new Date(date);
+ },
+ stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
+};
+
+export { floatTime, intTime, timestamp };
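The three tags above implement YAML 1.1 sexagesimal integers and floats (`hh:mm:ss` style) and `!!timestamp` dates. A hedged sketch of the observable behaviour via `parse` with the `yaml-1.1` schema (assuming import as `yaml`; the values in comments follow the resolve logic above):

```js
import { parse } from 'yaml'

// Sexagesimal integer: 190 * 3600 + 20 * 60 + 30
const secs = parse('190:20:30', { schema: 'yaml-1.1' })
// secs === 685230

// Timestamp with a short time-zone offset; resolved via Date.UTC and
// then shifted by the parsed offset, producing a Date instance.
const when = parse('2001-12-14 21:59:43.10 -5', { schema: 'yaml-1.1' })
// when.toISOString() === '2001-12-15T02:59:43.100Z'
```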
diff --git a/node_modules/yaml/browser/dist/stringify/foldFlowLines.js b/node_modules/yaml/browser/dist/stringify/foldFlowLines.js
new file mode 100644
index 0000000..01fe787
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/foldFlowLines.js
@@ -0,0 +1,135 @@
+const FOLD_FLOW = 'flow';
+const FOLD_BLOCK = 'block';
+const FOLD_QUOTED = 'quoted';
+/**
+ * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
+ * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
+ * terminated with `\n` and started with `indent`.
+ */
+function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
+ if (!lineWidth || lineWidth < 0)
+ return text;
+ const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
+ if (text.length <= endStep)
+ return text;
+ const folds = [];
+ const escapedFolds = {};
+ let end = lineWidth - indent.length;
+ if (typeof indentAtStart === 'number') {
+ if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
+ folds.push(0);
+ else
+ end = lineWidth - indentAtStart;
+ }
+ let split = undefined;
+ let prev = undefined;
+ let overflow = false;
+ let i = -1;
+ let escStart = -1;
+ let escEnd = -1;
+ if (mode === FOLD_BLOCK) {
+ i = consumeMoreIndentedLines(text, i);
+ if (i !== -1)
+ end = i + endStep;
+ }
+ for (let ch; (ch = text[(i += 1)]);) {
+ if (mode === FOLD_QUOTED && ch === '\\') {
+ escStart = i;
+ switch (text[i + 1]) {
+ case 'x':
+ i += 3;
+ break;
+ case 'u':
+ i += 5;
+ break;
+ case 'U':
+ i += 9;
+ break;
+ default:
+ i += 1;
+ }
+ escEnd = i;
+ }
+ if (ch === '\n') {
+ if (mode === FOLD_BLOCK)
+ i = consumeMoreIndentedLines(text, i);
+ end = i + endStep;
+ split = undefined;
+ }
+ else {
+ if (ch === ' ' &&
+ prev &&
+ prev !== ' ' &&
+ prev !== '\n' &&
+ prev !== '\t') {
+ // space surrounded by non-space can be replaced with newline + indent
+ const next = text[i + 1];
+ if (next && next !== ' ' && next !== '\n' && next !== '\t')
+ split = i;
+ }
+ if (i >= end) {
+ if (split) {
+ folds.push(split);
+ end = split + endStep;
+ split = undefined;
+ }
+ else if (mode === FOLD_QUOTED) {
+ // white-space collected at end may stretch past lineWidth
+ while (prev === ' ' || prev === '\t') {
+ prev = ch;
+ ch = text[(i += 1)];
+ overflow = true;
+ }
+ // Account for newline escape, but don't break preceding escape
+ const j = i > escEnd + 1 ? i - 2 : escStart - 1;
+ // Bail out if lineWidth & minContentWidth are shorter than an escape string
+ if (escapedFolds[j])
+ return text;
+ folds.push(j);
+ escapedFolds[j] = true;
+ end = j + endStep;
+ split = undefined;
+ }
+ else {
+ overflow = true;
+ }
+ }
+ }
+ prev = ch;
+ }
+ if (overflow && onOverflow)
+ onOverflow();
+ if (folds.length === 0)
+ return text;
+ if (onFold)
+ onFold();
+ let res = text.slice(0, folds[0]);
+ for (let i = 0; i < folds.length; ++i) {
+ const fold = folds[i];
+ const end = folds[i + 1] || text.length;
+ if (fold === 0)
+ res = `\n${indent}${text.slice(0, end)}`;
+ else {
+ if (mode === FOLD_QUOTED && escapedFolds[fold])
+ res += `${text[fold]}\\`;
+ res += `\n${indent}${text.slice(fold + 1, end)}`;
+ }
+ }
+ return res;
+}
+/**
+ * Presumes `i + 1` is at the start of a line
+ * @returns index of last newline in more-indented block
+ */
+function consumeMoreIndentedLines(text, i) {
+ let ch = text[i + 1];
+ while (ch === ' ' || ch === '\t') {
+ do {
+ ch = text[(i += 1)];
+ } while (ch && ch !== '\n');
+ ch = text[i + 1];
+ }
+ return i;
+}
+
+export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines };
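`foldFlowLines` is what enforces the `lineWidth` and `minContentWidth` options when long scalars are stringified. A small sketch of the observable effect through the public API (assuming import as `yaml`; the exact fold points depend on the widths chosen):

```js
import { stringify } from 'yaml'

const value = { msg: 'the quick brown fox jumps over the lazy dog '.repeat(3).trim() }

// Narrowing lineWidth forces folds at spaces, with continuation lines
// starting at the current indent.
console.log(stringify(value, { lineWidth: 40 }))
// msg: the quick brown fox jumps over the
//   lazy dog the quick brown fox ...

// lineWidth: 0 disables folding entirely (see the early return above).
console.log(stringify(value, { lineWidth: 0 }))
```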
diff --git a/node_modules/yaml/browser/dist/stringify/stringify.js b/node_modules/yaml/browser/dist/stringify/stringify.js
new file mode 100644
index 0000000..45fa03d
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringify.js
@@ -0,0 +1,124 @@
+import { anchorIsValid } from '../doc/anchors.js';
+import { isPair, isAlias, isNode, isScalar, isCollection } from '../nodes/Node.js';
+import { stringifyComment } from './stringifyComment.js';
+import { stringifyString } from './stringifyString.js';
+
+function createStringifyContext(doc, options) {
+ const opt = Object.assign({
+ blockQuote: true,
+ commentString: stringifyComment,
+ defaultKeyType: null,
+ defaultStringType: 'PLAIN',
+ directives: null,
+ doubleQuotedAsJSON: false,
+ doubleQuotedMinMultiLineLength: 40,
+ falseStr: 'false',
+ indentSeq: true,
+ lineWidth: 80,
+ minContentWidth: 20,
+ nullStr: 'null',
+ simpleKeys: false,
+ singleQuote: null,
+ trueStr: 'true',
+ verifyAliasOrder: true
+ }, doc.schema.toStringOptions, options);
+ let inFlow;
+ switch (opt.collectionStyle) {
+ case 'block':
+ inFlow = false;
+ break;
+ case 'flow':
+ inFlow = true;
+ break;
+ default:
+ inFlow = null;
+ }
+ return {
+ anchors: new Set(),
+ doc,
+ indent: '',
+ indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
+ inFlow,
+ options: opt
+ };
+}
+function getTagObject(tags, item) {
+ var _a, _b, _c, _d;
+ if (item.tag) {
+ const match = tags.filter(t => t.tag === item.tag);
+ if (match.length > 0)
+ return (_a = match.find(t => t.format === item.format)) !== null && _a !== void 0 ? _a : match[0];
+ }
+ let tagObj = undefined;
+ let obj;
+ if (isScalar(item)) {
+ obj = item.value;
+ const match = tags.filter(t => { var _a; return (_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, obj); });
+ tagObj =
+ (_b = match.find(t => t.format === item.format)) !== null && _b !== void 0 ? _b : match.find(t => !t.format);
+ }
+ else {
+ obj = item;
+ tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
+ }
+ if (!tagObj) {
+ const name = (_d = (_c = obj === null || obj === void 0 ? void 0 : obj.constructor) === null || _c === void 0 ? void 0 : _c.name) !== null && _d !== void 0 ? _d : typeof obj;
+ throw new Error(`Tag not resolved for ${name} value`);
+ }
+ return tagObj;
+}
+// needs to be called before value stringifier to allow for circular anchor refs
+function stringifyProps(node, tagObj, { anchors, doc }) {
+ if (!doc.directives)
+ return '';
+ const props = [];
+ const anchor = (isScalar(node) || isCollection(node)) && node.anchor;
+ if (anchor && anchorIsValid(anchor)) {
+ anchors.add(anchor);
+ props.push(`&${anchor}`);
+ }
+ const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
+ if (tag)
+ props.push(doc.directives.tagString(tag));
+ return props.join(' ');
+}
+function stringify(item, ctx, onComment, onChompKeep) {
+ var _a, _b;
+ if (isPair(item))
+ return item.toString(ctx, onComment, onChompKeep);
+ if (isAlias(item)) {
+ if (ctx.doc.directives)
+ return item.toString(ctx);
+ if ((_a = ctx.resolvedAliases) === null || _a === void 0 ? void 0 : _a.has(item)) {
+ throw new TypeError(`Cannot stringify circular structure without alias nodes`);
+ }
+ else {
+ if (ctx.resolvedAliases)
+ ctx.resolvedAliases.add(item);
+ else
+ ctx.resolvedAliases = new Set([item]);
+ item = item.resolve(ctx.doc);
+ }
+ }
+ let tagObj = undefined;
+ const node = isNode(item)
+ ? item
+ : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
+ if (!tagObj)
+ tagObj = getTagObject(ctx.doc.schema.tags, node);
+ const props = stringifyProps(node, tagObj, ctx);
+ if (props.length > 0)
+ ctx.indentAtStart = ((_b = ctx.indentAtStart) !== null && _b !== void 0 ? _b : 0) + props.length + 1;
+ const str = typeof tagObj.stringify === 'function'
+ ? tagObj.stringify(node, ctx, onComment, onChompKeep)
+ : isScalar(node)
+ ? stringifyString(node, ctx, onComment, onChompKeep)
+ : node.toString(ctx, onComment, onChompKeep);
+ if (!props)
+ return str;
+ return isScalar(node) || str[0] === '{' || str[0] === '['
+ ? `${props} ${str}`
+ : `${props}\n${ctx.indent}${str}`;
+}
+
+export { createStringifyContext, stringify };
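`createStringifyContext` above is where the document-level stringify options get their defaults. The same names are accepted by `YAML.stringify` and `doc.toString()`; a brief sketch of overriding a few of them (assuming import as `yaml`; output in comments is illustrative):

```js
import { stringify } from 'yaml'

// nullStr / trueStr swap the literals used for those scalars,
// and indent controls the indentStep built above.
console.log(stringify({ a: null, b: true, c: [1, 2] }, {
  nullStr: '~',
  trueStr: 'True',
  indent: 4
}))
// a: ~
// b: True
// c:
//     - 1
//     - 2
```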
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyCollection.js b/node_modules/yaml/browser/dist/stringify/stringifyCollection.js
new file mode 100644
index 0000000..0a2f0ee
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyCollection.js
@@ -0,0 +1,152 @@
+import { Collection } from '../nodes/Collection.js';
+import { isNode, isPair } from '../nodes/Node.js';
+import { stringify } from './stringify.js';
+import { lineComment, indentComment } from './stringifyComment.js';
+
+function stringifyCollection(collection, ctx, options) {
+ var _a;
+ const flow = (_a = ctx.inFlow) !== null && _a !== void 0 ? _a : collection.flow;
+ const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
+ return stringify(collection, ctx, options);
+}
+function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
+ const { indent, options: { commentString } } = ctx;
+ const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
+ let chompKeep = false; // flag for the preceding node's status
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (isNode(item)) {
+ if (!chompKeep && item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (isPair(item)) {
+ const ik = isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (!chompKeep && ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
+ }
+ }
+ chompKeep = false;
+ let str = stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));
+ if (comment)
+ str += lineComment(str, itemIndent, commentString(comment));
+ if (chompKeep && comment)
+ chompKeep = false;
+ lines.push(blockItemPrefix + str);
+ }
+ let str;
+ if (lines.length === 0) {
+ str = flowChars.start + flowChars.end;
+ }
+ else {
+ str = lines[0];
+ for (let i = 1; i < lines.length; ++i) {
+ const line = lines[i];
+ str += line ? `\n${indent}${line}` : '\n';
+ }
+ }
+ if (comment) {
+ str += '\n' + indentComment(commentString(comment), indent);
+ if (onComment)
+ onComment();
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+}
+function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
+ const { indent, indentStep, options: { commentString } } = ctx;
+ itemIndent += indentStep;
+ const itemCtx = Object.assign({}, ctx, {
+ indent: itemIndent,
+ inFlow: true,
+ type: null
+ });
+ let reqNewline = false;
+ let linesAtValue = 0;
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (isNode(item)) {
+ if (item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, false);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (isPair(item)) {
+ const ik = isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, false);
+ if (ik.comment)
+ reqNewline = true;
+ }
+ const iv = isNode(item.value) ? item.value : null;
+ if (iv) {
+ if (iv.comment)
+ comment = iv.comment;
+ if (iv.commentBefore)
+ reqNewline = true;
+ }
+ else if (item.value == null && ik && ik.comment) {
+ comment = ik.comment;
+ }
+ }
+ if (comment)
+ reqNewline = true;
+ let str = stringify(item, itemCtx, () => (comment = null));
+ if (i < items.length - 1)
+ str += ',';
+ if (comment)
+ str += lineComment(str, itemIndent, commentString(comment));
+ if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
+ reqNewline = true;
+ lines.push(str);
+ linesAtValue = lines.length;
+ }
+ let str;
+ const { start, end } = flowChars;
+ if (lines.length === 0) {
+ str = start + end;
+ }
+ else {
+ if (!reqNewline) {
+ const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
+ reqNewline = len > Collection.maxFlowStringSingleLineLength;
+ }
+ if (reqNewline) {
+ str = start;
+ for (const line of lines)
+ str += line ? `\n${indentStep}${indent}${line}` : '\n';
+ str += `\n${indent}${end}`;
+ }
+ else {
+ str = `${start} ${lines.join(' ')} ${end}`;
+ }
+ }
+ if (comment) {
+ str += lineComment(str, indent, commentString(comment));
+ if (onComment)
+ onComment();
+ }
+ return str;
+}
+function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
+ if (comment && chompKeep)
+ comment = comment.replace(/^\n+/, '');
+ if (comment) {
+ const ic = indentComment(commentString(comment), indent);
+ lines.push(ic.trimStart()); // Avoid double indent on first line
+ }
+}
+
+export { stringifyCollection };
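Whether `stringifyFlowCollection` or `stringifyBlockCollection` is used follows `ctx.inFlow`, which the `collectionStyle` option (handled in `createStringifyContext` above) can force. A hedged sketch (assuming import as `yaml`; spacing in the output is illustrative):

```js
import { stringify } from 'yaml'

const data = { letters: ['a', 'b', 'c'] }

// Default: block collections
console.log(stringify(data))
// letters:
//   - a
//   - b
//   - c

// Force flow style for all collections
console.log(stringify(data, { collectionStyle: 'flow' }))
// { letters: [ a, b, c ] }
```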
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyComment.js b/node_modules/yaml/browser/dist/stringify/stringifyComment.js
new file mode 100644
index 0000000..f16fc91
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyComment.js
@@ -0,0 +1,20 @@
+/**
+ * Stringifies a comment.
+ *
+ * Empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
+function indentComment(comment, indent) {
+ if (/^\n+$/.test(comment))
+ return comment.substring(1);
+ return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
+}
+const lineComment = (str, indent, comment) => str.endsWith('\n')
+ ? indentComment(comment, indent)
+ : comment.includes('\n')
+ ? '\n' + indentComment(comment, indent)
+ : (str.endsWith(' ') ? '' : ' ') + comment;
+
+export { indentComment, lineComment, stringifyComment };
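These helpers render the `comment` and `commentBefore` properties carried by nodes and documents. A small sketch of how they show up through the document API (assuming import as `yaml`; output in comments is illustrative):

```js
import { Document } from 'yaml'

const doc = new Document({ a: 1 })
doc.commentBefore = ' File header' // leading space keeps '# File header'
doc.comment = ' trailing comment'
console.log(String(doc))
// # File header
//
// a: 1
//
// # trailing comment
```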
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyDocument.js b/node_modules/yaml/browser/dist/stringify/stringifyDocument.js
new file mode 100644
index 0000000..bafad0b
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyDocument.js
@@ -0,0 +1,86 @@
+import { isNode } from '../nodes/Node.js';
+import { createStringifyContext, stringify } from './stringify.js';
+import { indentComment, lineComment } from './stringifyComment.js';
+
+function stringifyDocument(doc, options) {
+ var _a;
+ const lines = [];
+ let hasDirectives = options.directives === true;
+ if (options.directives !== false && doc.directives) {
+ const dir = doc.directives.toString(doc);
+ if (dir) {
+ lines.push(dir);
+ hasDirectives = true;
+ }
+ else if (doc.directives.docStart)
+ hasDirectives = true;
+ }
+ if (hasDirectives)
+ lines.push('---');
+ const ctx = createStringifyContext(doc, options);
+ const { commentString } = ctx.options;
+ if (doc.commentBefore) {
+ if (lines.length !== 1)
+ lines.unshift('');
+ const cs = commentString(doc.commentBefore);
+ lines.unshift(indentComment(cs, ''));
+ }
+ let chompKeep = false;
+ let contentComment = null;
+ if (doc.contents) {
+ if (isNode(doc.contents)) {
+ if (doc.contents.spaceBefore && hasDirectives)
+ lines.push('');
+ if (doc.contents.commentBefore) {
+ const cs = commentString(doc.contents.commentBefore);
+ lines.push(indentComment(cs, ''));
+ }
+ // top-level block scalars need to be indented if followed by a comment
+ ctx.forceBlockIndent = !!doc.comment;
+ contentComment = doc.contents.comment;
+ }
+ const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
+ let body = stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);
+ if (contentComment)
+ body += lineComment(body, '', commentString(contentComment));
+ if ((body[0] === '|' || body[0] === '>') &&
+ lines[lines.length - 1] === '---') {
+ // Top-level block scalars with a preceding doc marker ought to use the
+ // same line for their header.
+ lines[lines.length - 1] = `--- ${body}`;
+ }
+ else
+ lines.push(body);
+ }
+ else {
+ lines.push(stringify(doc.contents, ctx));
+ }
+ if ((_a = doc.directives) === null || _a === void 0 ? void 0 : _a.docEnd) {
+ if (doc.comment) {
+ const cs = commentString(doc.comment);
+ if (cs.includes('\n')) {
+ lines.push('...');
+ lines.push(indentComment(cs, ''));
+ }
+ else {
+ lines.push(`... ${cs}`);
+ }
+ }
+ else {
+ lines.push('...');
+ }
+ }
+ else {
+ let dc = doc.comment;
+ if (dc && chompKeep)
+ dc = dc.replace(/^\n+/, '');
+ if (dc) {
+ if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
+ lines.push('');
+ lines.push(indentComment(commentString(dc), ''));
+ }
+ }
+ return lines.join('\n') + '\n';
+}
+
+export { stringifyDocument };
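As the `hasDirectives` handling above shows, the `---` directives-end marker is emitted when the `directives` option is true or the document itself records one. A short sketch (assuming import as `yaml`):

```js
import { parseDocument, stringify } from 'yaml'

// Forcing the directives-end marker on output
console.log(stringify({ a: 1 }, { directives: true }))
// ---
// a: 1

// A parsed document that used '---' remembers it via directives.docStart
const doc = parseDocument('---\na: 1\n')
console.log(String(doc))
// ---
// a: 1
```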
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyNumber.js b/node_modules/yaml/browser/dist/stringify/stringifyNumber.js
new file mode 100644
index 0000000..3fa35f9
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyNumber.js
@@ -0,0 +1,24 @@
+function stringifyNumber({ format, minFractionDigits, tag, value }) {
+ if (typeof value === 'bigint')
+ return String(value);
+ const num = typeof value === 'number' ? value : Number(value);
+ if (!isFinite(num))
+ return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
+ let n = JSON.stringify(value);
+ if (!format &&
+ minFractionDigits &&
+ (!tag || tag === 'tag:yaml.org,2002:float') &&
+ /^\d/.test(n)) {
+ let i = n.indexOf('.');
+ if (i < 0) {
+ i = n.length;
+ n += '.';
+ }
+ let d = minFractionDigits - (n.length - i - 1);
+ while (d-- > 0)
+ n += '0';
+ }
+ return n;
+}
+
+export { stringifyNumber };
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyPair.js b/node_modules/yaml/browser/dist/stringify/stringifyPair.js
new file mode 100644
index 0000000..6243b0e
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyPair.js
@@ -0,0 +1,125 @@
+import { isCollection, isNode, isScalar, isSeq } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import { stringify } from './stringify.js';
+import { lineComment, indentComment } from './stringifyComment.js';
+
+function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
+ const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
+ let keyComment = (isNode(key) && key.comment) || null;
+ if (simpleKeys) {
+ if (keyComment) {
+ throw new Error('With simple keys, key nodes cannot have comments');
+ }
+ if (isCollection(key)) {
+ const msg = 'With simple keys, collection cannot be used as a key value';
+ throw new Error(msg);
+ }
+ }
+ let explicitKey = !simpleKeys &&
+ (!key ||
+ (keyComment && value == null && !ctx.inFlow) ||
+ isCollection(key) ||
+ (isScalar(key)
+ ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
+ : typeof key === 'object'));
+ ctx = Object.assign({}, ctx, {
+ allNullValues: false,
+ implicitKey: !explicitKey && (simpleKeys || !allNullValues),
+ indent: indent + indentStep
+ });
+ let keyCommentDone = false;
+ let chompKeep = false;
+ let str = stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
+ if (!explicitKey && !ctx.inFlow && str.length > 1024) {
+ if (simpleKeys)
+ throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
+ explicitKey = true;
+ }
+ if (ctx.inFlow) {
+ if (allNullValues || value == null) {
+ if (keyCommentDone && onComment)
+ onComment();
+ return str === '' ? '?' : explicitKey ? `? ${str}` : str;
+ }
+ }
+ else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
+ str = `? ${str}`;
+ if (keyComment && !keyCommentDone) {
+ str += lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+ }
+ if (keyCommentDone)
+ keyComment = null;
+ if (explicitKey) {
+ if (keyComment)
+ str += lineComment(str, ctx.indent, commentString(keyComment));
+ str = `? ${str}\n${indent}:`;
+ }
+ else {
+ str = `${str}:`;
+ if (keyComment)
+ str += lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ let vcb = '';
+ let valueComment = null;
+ if (isNode(value)) {
+ if (value.spaceBefore)
+ vcb = '\n';
+ if (value.commentBefore) {
+ const cs = commentString(value.commentBefore);
+ vcb += `\n${indentComment(cs, ctx.indent)}`;
+ }
+ valueComment = value.comment;
+ }
+ else if (value && typeof value === 'object') {
+ value = doc.createNode(value);
+ }
+ ctx.implicitKey = false;
+ if (!explicitKey && !keyComment && isScalar(value))
+ ctx.indentAtStart = str.length + 1;
+ chompKeep = false;
+ if (!indentSeq &&
+ indentStep.length >= 2 &&
+ !ctx.inFlow &&
+ !explicitKey &&
+ isSeq(value) &&
+ !value.flow &&
+ !value.tag &&
+ !value.anchor) {
+ // If indentSeq === false, consider '- ' as part of indentation where possible
+ ctx.indent = ctx.indent.substr(2);
+ }
+ let valueCommentDone = false;
+ const valueStr = stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
+ let ws = ' ';
+ if (vcb || keyComment) {
+ if (valueStr === '' && !ctx.inFlow)
+ ws = vcb === '\n' ? '\n\n' : vcb;
+ else
+ ws = `${vcb}\n${ctx.indent}`;
+ }
+ else if (!explicitKey && isCollection(value)) {
+ const flow = valueStr[0] === '[' || valueStr[0] === '{';
+ if (!flow || valueStr.includes('\n'))
+ ws = `\n${ctx.indent}`;
+ }
+ else if (valueStr === '' || valueStr[0] === '\n')
+ ws = '';
+ str += ws + valueStr;
+ if (ctx.inFlow) {
+ if (valueCommentDone && onComment)
+ onComment();
+ }
+ else if (valueComment && !valueCommentDone) {
+ str += lineComment(str, ctx.indent, commentString(valueComment));
+ }
+ else if (chompKeep && onChompKeep) {
+ onChompKeep();
+ }
+ return str;
+}
+
+export { stringifyPair };
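The explicit-key form (`? key`) handled above is chosen when the key is a collection, a block scalar, or a scalar longer than 1024 characters. A hedged sketch of one such case (assuming import as `yaml`; output in comments is illustrative):

```js
import { stringify } from 'yaml'

// A Map with a non-scalar key forces the explicit-key form.
const m = new Map([[{ x: 1 }, 'value']])
console.log(stringify(m))
// ? x: 1
// : value
```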
diff --git a/node_modules/yaml/browser/dist/stringify/stringifyString.js b/node_modules/yaml/browser/dist/stringify/stringifyString.js
new file mode 100644
index 0000000..090a222
--- /dev/null
+++ b/node_modules/yaml/browser/dist/stringify/stringifyString.js
@@ -0,0 +1,314 @@
+import { Scalar } from '../nodes/Scalar.js';
+import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js';
+
+const getFoldOptions = (ctx) => ({
+ indentAtStart: ctx.indentAtStart,
+ lineWidth: ctx.options.lineWidth,
+ minContentWidth: ctx.options.minContentWidth
+});
+// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
+// presume that's starting a new document.
+const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
+function lineLengthOverLimit(str, lineWidth, indentLength) {
+ if (!lineWidth || lineWidth < 0)
+ return false;
+ const limit = lineWidth - indentLength;
+ const strLen = str.length;
+ if (strLen <= limit)
+ return false;
+ for (let i = 0, start = 0; i < strLen; ++i) {
+ if (str[i] === '\n') {
+ if (i - start > limit)
+ return true;
+ start = i + 1;
+ if (strLen - start <= limit)
+ return false;
+ }
+ }
+ return true;
+}
+function doubleQuotedString(value, ctx) {
+ const json = JSON.stringify(value);
+ if (ctx.options.doubleQuotedAsJSON)
+ return json;
+ const { implicitKey } = ctx;
+ const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ let str = '';
+ let start = 0;
+ for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
+ if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
+ // space before newline needs to be escaped to not be folded
+ str += json.slice(start, i) + '\\ ';
+ i += 1;
+ start = i;
+ ch = '\\';
+ }
+ if (ch === '\\')
+ switch (json[i + 1]) {
+ case 'u':
+ {
+ str += json.slice(start, i);
+ const code = json.substr(i + 2, 4);
+ switch (code) {
+ case '0000':
+ str += '\\0';
+ break;
+ case '0007':
+ str += '\\a';
+ break;
+ case '000b':
+ str += '\\v';
+ break;
+ case '001b':
+ str += '\\e';
+ break;
+ case '0085':
+ str += '\\N';
+ break;
+ case '00a0':
+ str += '\\_';
+ break;
+ case '2028':
+ str += '\\L';
+ break;
+ case '2029':
+ str += '\\P';
+ break;
+ default:
+ if (code.substr(0, 2) === '00')
+ str += '\\x' + code.substr(2);
+ else
+ str += json.substr(i, 6);
+ }
+ i += 5;
+ start = i + 1;
+ }
+ break;
+ case 'n':
+ if (implicitKey ||
+ json[i + 2] === '"' ||
+ json.length < minMultiLineLength) {
+ i += 1;
+ }
+ else {
+ // folding will eat first newline
+ str += json.slice(start, i) + '\n\n';
+ while (json[i + 2] === '\\' &&
+ json[i + 3] === 'n' &&
+ json[i + 4] !== '"') {
+ str += '\n';
+ i += 2;
+ }
+ str += indent;
+ // space after newline needs to be escaped to not be folded
+ if (json[i + 2] === ' ')
+ str += '\\';
+ i += 1;
+ start = i + 1;
+ }
+ break;
+ default:
+ i += 1;
+ }
+ }
+ str = start ? str + json.slice(start) : json;
+ return implicitKey
+ ? str
+ : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
+}
+function singleQuotedString(value, ctx) {
+ if (ctx.options.singleQuote === false ||
+ (ctx.implicitKey && value.includes('\n')) ||
+ /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
+ )
+ return doubleQuotedString(value, ctx);
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
+ return ctx.implicitKey
+ ? res
+ : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
+}
+function quotedString(value, ctx) {
+ const { singleQuote } = ctx.options;
+ let qs;
+ if (singleQuote === false)
+ qs = doubleQuotedString;
+ else {
+ const hasDouble = value.includes('"');
+ const hasSingle = value.includes("'");
+ if (hasDouble && !hasSingle)
+ qs = singleQuotedString;
+ else if (hasSingle && !hasDouble)
+ qs = doubleQuotedString;
+ else
+ qs = singleQuote ? singleQuotedString : doubleQuotedString;
+ }
+ return qs(value, ctx);
+}
+function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const { blockQuote, commentString, lineWidth } = ctx.options;
+ // 1. Block can't end in whitespace unless the last line is non-empty.
+ // 2. Strings consisting of only whitespace are best rendered explicitly.
+ if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
+ return quotedString(value, ctx);
+ }
+ const indent = ctx.indent ||
+ (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
+ const literal = blockQuote === 'literal'
+ ? true
+ : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
+ ? false
+ : type === Scalar.BLOCK_LITERAL
+ ? true
+ : !lineLengthOverLimit(value, lineWidth, indent.length);
+ if (!value)
+ return literal ? '|\n' : '>\n';
+ // determine chomping from whitespace at value end
+ let chomp;
+ let endStart;
+ for (endStart = value.length; endStart > 0; --endStart) {
+ const ch = value[endStart - 1];
+ if (ch !== '\n' && ch !== '\t' && ch !== ' ')
+ break;
+ }
+ let end = value.substring(endStart);
+ const endNlPos = end.indexOf('\n');
+ if (endNlPos === -1) {
+ chomp = '-'; // strip
+ }
+ else if (value === end || endNlPos !== end.length - 1) {
+ chomp = '+'; // keep
+ if (onChompKeep)
+ onChompKeep();
+ }
+ else {
+ chomp = ''; // clip
+ }
+ if (end) {
+ value = value.slice(0, -end.length);
+ if (end[end.length - 1] === '\n')
+ end = end.slice(0, -1);
+ end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
+ }
+ // determine indent indicator from whitespace at value start
+ let startWithSpace = false;
+ let startEnd;
+ let startNlPos = -1;
+ for (startEnd = 0; startEnd < value.length; ++startEnd) {
+ const ch = value[startEnd];
+ if (ch === ' ')
+ startWithSpace = true;
+ else if (ch === '\n')
+ startNlPos = startEnd;
+ else
+ break;
+ }
+ let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
+ if (start) {
+ value = value.substring(start.length);
+ start = start.replace(/\n+/g, `$&${indent}`);
+ }
+ const indentSize = indent ? '2' : '1'; // root is at -1
+ let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
+ if (comment) {
+ header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
+ if (onComment)
+ onComment();
+ }
+ if (literal) {
+ value = value.replace(/\n+/g, `$&${indent}`);
+ return `${header}\n${indent}${start}${value}${end}`;
+ }
+ value = value
+ .replace(/\n+/g, '\n$&')
+ .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
+ // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
+ .replace(/\n+/g, `$&${indent}`);
+ const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx));
+ return `${header}\n${indent}${body}`;
+}
+function plainString(item, ctx, onComment, onChompKeep) {
+ const { type, value } = item;
+ const { actualString, implicitKey, indent, inFlow } = ctx;
+ if ((implicitKey && /[\n[\]{},]/.test(value)) ||
+ (inFlow && /[[\]{},]/.test(value))) {
+ return quotedString(value, ctx);
+ }
+ if (!value ||
+ /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
+ // not allowed:
+ // - empty string, '-' or '?'
+ // - start with an indicator character (except [?:-]) or /[?-] /
+ // - '\n ', ': ' or ' \n' anywhere
+ // - '#' not preceded by a non-space char
+ // - end with ' ' or ':'
+ return implicitKey || inFlow || !value.includes('\n')
+ ? quotedString(value, ctx)
+ : blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (!implicitKey &&
+ !inFlow &&
+ type !== Scalar.PLAIN &&
+ value.includes('\n')) {
+ // Where allowed & type not set explicitly, prefer block style for multiline strings
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (indent === '' && containsDocumentMarker(value)) {
+ ctx.forceBlockIndent = true;
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ const str = value.replace(/\n+/g, `$&\n${indent}`);
+ // Verify that output will be parsed as a string, as e.g. plain numbers and
+ // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
+ // and others in v1.1.
+ if (actualString) {
+ const test = (tag) => { var _a; return tag.default && tag.tag !== 'tag:yaml.org,2002:str' && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(str)); };
+ const { compat, tags } = ctx.doc.schema;
+ if (tags.some(test) || (compat === null || compat === void 0 ? void 0 : compat.some(test)))
+ return quotedString(value, ctx);
+ }
+ return implicitKey
+ ? str
+ : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
+}
+function stringifyString(item, ctx, onComment, onChompKeep) {
+ const { implicitKey, inFlow } = ctx;
+ const ss = typeof item.value === 'string'
+ ? item
+ : Object.assign({}, item, { value: String(item.value) });
+ let { type } = item;
+ if (type !== Scalar.QUOTE_DOUBLE) {
+ // force double quotes on control characters & unpaired surrogates
+ if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
+ type = Scalar.QUOTE_DOUBLE;
+ }
+ const _stringify = (_type) => {
+ switch (_type) {
+ case Scalar.BLOCK_FOLDED:
+ case Scalar.BLOCK_LITERAL:
+ return implicitKey || inFlow
+ ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
+ : blockString(ss, ctx, onComment, onChompKeep);
+ case Scalar.QUOTE_DOUBLE:
+ return doubleQuotedString(ss.value, ctx);
+ case Scalar.QUOTE_SINGLE:
+ return singleQuotedString(ss.value, ctx);
+ case Scalar.PLAIN:
+ return plainString(ss, ctx, onComment, onChompKeep);
+ default:
+ return null;
+ }
+ };
+ let res = _stringify(type);
+ if (res === null) {
+ const { defaultKeyType, defaultStringType } = ctx.options;
+ const t = (implicitKey && defaultKeyType) || defaultStringType;
+ res = _stringify(t);
+ if (res === null)
+ throw new Error(`Unsupported default string type ${t}`);
+ }
+ return res;
+}
+
+export { stringifyString };
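The `blockString` / `quotedString` choice above means multi-line strings normally become block scalars, unless `blockQuote` disables them or the value has whitespace that a block scalar cannot represent. A brief sketch (assuming import as `yaml`; output in comments is illustrative):

```js
import { stringify } from 'yaml'

const value = { text: 'first line\nsecond line\n' }

// Multi-line strings prefer a block literal by default
console.log(stringify(value))
// text: |
//   first line
//   second line

// blockQuote: false falls back to a quoted flow scalar
console.log(stringify(value, { blockQuote: false }))
// text: "first line\nsecond line\n"
```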
diff --git a/node_modules/yaml/browser/dist/util.js b/node_modules/yaml/browser/dist/util.js
new file mode 100644
index 0000000..eb104a5
--- /dev/null
+++ b/node_modules/yaml/browser/dist/util.js
@@ -0,0 +1,9 @@
+export { debug, warn } from './log.js';
+export { findPair } from './nodes/YAMLMap.js';
+export { toJS } from './nodes/toJS.js';
+export { map as mapTag } from './schema/common/map.js';
+export { seq as seqTag } from './schema/common/seq.js';
+export { string as stringTag } from './schema/common/string.js';
+export { foldFlowLines } from './stringify/foldFlowLines.js';
+export { stringifyNumber } from './stringify/stringifyNumber.js';
+export { stringifyString } from './stringify/stringifyString.js';
diff --git a/node_modules/yaml/browser/dist/visit.js b/node_modules/yaml/browser/dist/visit.js
new file mode 100644
index 0000000..75cb8f4
--- /dev/null
+++ b/node_modules/yaml/browser/dist/visit.js
@@ -0,0 +1,234 @@
+import { isDocument, isNode, isPair, isCollection, isMap, isSeq, isScalar, isAlias } from './nodes/Node.js';
+
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove node');
+/**
+ * Apply a visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+function visit(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (isDocument(node)) {
+ const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ visit_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visit.SKIP = SKIP;
+/** Remove the current node */
+visit.REMOVE = REMOVE;
+function visit_(key, node, visitor, path) {
+ const ctrl = callVisitor(key, node, visitor, path);
+ if (isNode(ctrl) || isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visit_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = visit_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = visit_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = visit_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+/**
+ * Apply an async visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `Promise`: Must resolve to one of the following values
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+async function visitAsync(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (isDocument(node)) {
+ const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ await visitAsync_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visitAsync.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visitAsync.SKIP = SKIP;
+/** Remove the current node */
+visitAsync.REMOVE = REMOVE;
+async function visitAsync_(key, node, visitor, path) {
+ const ctrl = await callVisitor(key, node, visitor, path);
+ if (isNode(ctrl) || isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visitAsync_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = await visitAsync_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = await visitAsync_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = await visitAsync_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+function initVisitor(visitor) {
+ if (typeof visitor === 'object' &&
+ (visitor.Collection || visitor.Node || visitor.Value)) {
+ return Object.assign({
+ Alias: visitor.Node,
+ Map: visitor.Node,
+ Scalar: visitor.Node,
+ Seq: visitor.Node
+ }, visitor.Value && {
+ Map: visitor.Value,
+ Scalar: visitor.Value,
+ Seq: visitor.Value
+ }, visitor.Collection && {
+ Map: visitor.Collection,
+ Seq: visitor.Collection
+ }, visitor);
+ }
+ return visitor;
+}
+function callVisitor(key, node, visitor, path) {
+ var _a, _b, _c, _d, _e;
+ if (typeof visitor === 'function')
+ return visitor(key, node, path);
+ if (isMap(node))
+ return (_a = visitor.Map) === null || _a === void 0 ? void 0 : _a.call(visitor, key, node, path);
+ if (isSeq(node))
+ return (_b = visitor.Seq) === null || _b === void 0 ? void 0 : _b.call(visitor, key, node, path);
+ if (isPair(node))
+ return (_c = visitor.Pair) === null || _c === void 0 ? void 0 : _c.call(visitor, key, node, path);
+ if (isScalar(node))
+ return (_d = visitor.Scalar) === null || _d === void 0 ? void 0 : _d.call(visitor, key, node, path);
+ if (isAlias(node))
+ return (_e = visitor.Alias) === null || _e === void 0 ? void 0 : _e.call(visitor, key, node, path);
+ return undefined;
+}
+function replaceNode(key, path, node) {
+ const parent = path[path.length - 1];
+ if (isCollection(parent)) {
+ parent.items[key] = node;
+ }
+ else if (isPair(parent)) {
+ if (key === 'key')
+ parent.key = node;
+ else
+ parent.value = node;
+ }
+ else if (isDocument(parent)) {
+ parent.contents = node;
+ }
+ else {
+ const pt = isAlias(parent) ? 'alias' : 'scalar';
+ throw new Error(`Cannot replace node with ${pt} parent`);
+ }
+}
+
+export { visit, visitAsync };
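The visitor API documented above is exported from the package root. A minimal sketch of a per-type visitor that rewrites scalars in place (assuming import as `yaml`; output in comments is illustrative):

```js
import { parseDocument, visit } from 'yaml'

const doc = parseDocument('users:\n  - name: alice\n  - name: bob\n')

// Upper-case every string scalar; return values control traversal
// (visit.SKIP, visit.BREAK, visit.REMOVE, a replacement node, or an index).
visit(doc, {
  Scalar(key, node) {
    if (typeof node.value === 'string') node.value = node.value.toUpperCase()
  }
})
console.log(String(doc))
// USERS:
//   - NAME: ALICE
//   - NAME: BOB
```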
diff --git a/node_modules/yaml/browser/index.js b/node_modules/yaml/browser/index.js
new file mode 100644
index 0000000..5f73271
--- /dev/null
+++ b/node_modules/yaml/browser/index.js
@@ -0,0 +1,5 @@
+// `export * as default from ...` fails on Webpack v4
+// https://github.com/eemeli/yaml/issues/228
+import * as YAML from './dist/index.js'
+export default YAML
+export * from './dist/index.js'
diff --git a/node_modules/yaml/browser/package.json b/node_modules/yaml/browser/package.json
new file mode 100644
index 0000000..3dbc1ca
--- /dev/null
+++ b/node_modules/yaml/browser/package.json
@@ -0,0 +1,3 @@
+{
+ "type": "module"
+}
diff --git a/node_modules/yaml/dist/compose/compose-collection.d.ts b/node_modules/yaml/dist/compose/compose-collection.d.ts
new file mode 100644
index 0000000..7ba83ac
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-collection.d.ts
@@ -0,0 +1,5 @@
+import { ParsedNode } from '../nodes/Node.js';
+import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, tagToken: SourceToken | null, onError: ComposeErrorHandler): ParsedNode;
diff --git a/node_modules/yaml/dist/compose/compose-collection.js b/node_modules/yaml/dist/compose/compose-collection.js
new file mode 100644
index 0000000..52ad885
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-collection.js
@@ -0,0 +1,61 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var resolveBlockMap = require('./resolve-block-map.js');
+var resolveBlockSeq = require('./resolve-block-seq.js');
+var resolveFlowCollection = require('./resolve-flow-collection.js');
+
+function composeCollection(CN, ctx, token, tagToken, onError) {
+ let coll;
+ switch (token.type) {
+ case 'block-map': {
+ coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);
+ break;
+ }
+ case 'block-seq': {
+ coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);
+ break;
+ }
+ case 'flow-collection': {
+ coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);
+ break;
+ }
+ }
+ if (!tagToken)
+ return coll;
+ const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
+ if (!tagName)
+ return coll;
+ // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
+ const Coll = coll.constructor;
+ if (tagName === '!' || tagName === Coll.tagName) {
+ coll.tag = Coll.tagName;
+ return coll;
+ }
+ const expType = Node.isMap(coll) ? 'map' : 'seq';
+ let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
+ if (!tag) {
+ const kt = ctx.schema.knownTags[tagName];
+ if (kt && kt.collection === expType) {
+ ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
+ tag = kt;
+ }
+ else {
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
+ coll.tag = tagName;
+ return coll;
+ }
+ }
+ const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ const node = Node.isNode(res)
+ ? res
+ : new Scalar.Scalar(res);
+ node.range = coll.range;
+ node.tag = tagName;
+ if (tag === null || tag === void 0 ? void 0 : tag.format)
+ node.format = tag.format;
+ return node;
+}
+
+exports.composeCollection = composeCollection;
diff --git a/node_modules/yaml/dist/compose/compose-doc.d.ts b/node_modules/yaml/dist/compose/compose-doc.d.ts
new file mode 100644
index 0000000..2068f87
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-doc.d.ts
@@ -0,0 +1,6 @@
+import type { Directives } from '../doc/directives.js';
+import { Document } from '../doc/Document.js';
+import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
+import type * as CST from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeDoc(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed<import("../index.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/compose-doc.js b/node_modules/yaml/dist/compose/compose-doc.js
new file mode 100644
index 0000000..bb6453a
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-doc.js
@@ -0,0 +1,42 @@
+'use strict';
+
+var Document = require('../doc/Document.js');
+var composeNode = require('./compose-node.js');
+var resolveEnd = require('./resolve-end.js');
+var resolveProps = require('./resolve-props.js');
+
+function composeDoc(options, directives, { offset, start, value, end }, onError) {
+ const opts = Object.assign({ directives }, options);
+ const doc = new Document.Document(undefined, opts);
+ const ctx = {
+ atRoot: true,
+ directives: doc.directives,
+ options: doc.options,
+ schema: doc.schema
+ };
+ const props = resolveProps.resolveProps(start, {
+ indicator: 'doc-start',
+ next: value !== null && value !== void 0 ? value : end === null || end === void 0 ? void 0 : end[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ if (props.found) {
+ doc.directives.docStart = true;
+ if (value &&
+ (value.type === 'block-map' || value.type === 'block-seq') &&
+ !props.hasNewline)
+ onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
+ }
+ doc.contents = value
+ ? composeNode.composeNode(ctx, value, props, onError)
+ : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);
+ const contentEnd = doc.contents.range[2];
+ const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);
+ if (re.comment)
+ doc.comment = re.comment;
+ doc.range = [offset, contentEnd, re.offset];
+ return doc;
+}
+
+exports.composeDoc = composeDoc;
diff --git a/node_modules/yaml/dist/compose/compose-node.d.ts b/node_modules/yaml/dist/compose/compose-node.d.ts
new file mode 100644
index 0000000..a2f7810
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-node.d.ts
@@ -0,0 +1,26 @@
+import type { Directives } from '../doc/directives.js';
+import type { ParsedNode } from '../nodes/Node.js';
+import type { ParseOptions } from '../options.js';
+import type { SourceToken, Token } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { ComposeErrorHandler } from './composer.js';
+export interface ComposeContext {
+ atRoot: boolean;
+ directives: Directives;
+ options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>>;
+ schema: Readonly<Schema>;
+}
+interface Props {
+ spaceBefore: boolean;
+ comment: string;
+ anchor: SourceToken | null;
+ tag: SourceToken | null;
+}
+declare const CN: {
+ composeNode: typeof composeNode;
+ composeEmptyNode: typeof composeEmptyNode;
+};
+export declare type ComposeNode = typeof CN;
+export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode;
+export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag }: Props, onError: ComposeErrorHandler): import("../index.js").Scalar.Parsed;
+export {};
diff --git a/node_modules/yaml/dist/compose/compose-node.js b/node_modules/yaml/dist/compose/compose-node.js
new file mode 100644
index 0000000..df193ca
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-node.js
@@ -0,0 +1,93 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var composeCollection = require('./compose-collection.js');
+var composeScalar = require('./compose-scalar.js');
+var resolveEnd = require('./resolve-end.js');
+var utilEmptyScalarPosition = require('./util-empty-scalar-position.js');
+
+const CN = { composeNode, composeEmptyNode };
+function composeNode(ctx, token, props, onError) {
+ const { spaceBefore, comment, anchor, tag } = props;
+ let node;
+ let isSrcToken = true;
+ switch (token.type) {
+ case 'alias':
+ node = composeAlias(ctx, token, onError);
+ if (anchor || tag)
+ onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
+ break;
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'block-scalar':
+ node = composeScalar.composeScalar(ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ case 'block-map':
+ case 'block-seq':
+ case 'flow-collection':
+ node = composeCollection.composeCollection(CN, ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ default: {
+ const message = token.type === 'error'
+ ? token.message
+ : `Unsupported token (type: ${token.type})`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
+ isSrcToken = false;
+ }
+ }
+ if (anchor && node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment) {
+ if (token.type === 'scalar' && token.source === '')
+ node.comment = comment;
+ else
+ node.commentBefore = comment;
+ }
+ // @ts-expect-error Type checking misses meaning of isSrcToken
+ if (ctx.options.keepSourceTokens && isSrcToken)
+ node.srcToken = token;
+ return node;
+}
+function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {
+ const token = {
+ type: 'scalar',
+ offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),
+ indent: -1,
+ source: ''
+ };
+ const node = composeScalar.composeScalar(ctx, token, tag, onError);
+ if (anchor) {
+ node.anchor = anchor.source.substring(1);
+ if (node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ }
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment)
+ node.comment = comment;
+ return node;
+}
+function composeAlias({ options }, { offset, source, end }, onError) {
+ const alias = new Alias.Alias(source.substring(1));
+ if (alias.source === '')
+ onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
+ if (alias.source.endsWith(':'))
+ onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
+ const valueEnd = offset + source.length;
+ const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);
+ alias.range = [offset, valueEnd, re.offset];
+ if (re.comment)
+ alias.comment = re.comment;
+ return alias;
+}
+
+exports.composeEmptyNode = composeEmptyNode;
+exports.composeNode = composeNode;
diff --git a/node_modules/yaml/dist/compose/compose-scalar.d.ts b/node_modules/yaml/dist/compose/compose-scalar.d.ts
new file mode 100644
index 0000000..d5d0f79
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-scalar.d.ts
@@ -0,0 +1,5 @@
+import { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst.js';
+import type { ComposeContext } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed;
diff --git a/node_modules/yaml/dist/compose/compose-scalar.js b/node_modules/yaml/dist/compose/compose-scalar.js
new file mode 100644
index 0000000..ad2963c
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-scalar.js
@@ -0,0 +1,83 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var resolveBlockScalar = require('./resolve-block-scalar.js');
+var resolveFlowScalar = require('./resolve-flow-scalar.js');
+
+function composeScalar(ctx, token, tagToken, onError) {
+ const { value, type, comment, range } = token.type === 'block-scalar'
+ ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)
+ : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);
+ const tagName = tagToken
+ ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
+ : null;
+ const tag = tagToken && tagName
+ ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
+ : token.type === 'scalar'
+ ? findScalarTagByTest(ctx, value, token, onError)
+ : ctx.schema[Node.SCALAR];
+ let scalar;
+ try {
+ const res = tag.resolve(value, msg => onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);
+ }
+ catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg);
+ scalar = new Scalar.Scalar(value);
+ }
+ scalar.range = range;
+ scalar.source = value;
+ if (type)
+ scalar.type = type;
+ if (tagName)
+ scalar.tag = tagName;
+ if (tag.format)
+ scalar.format = tag.format;
+ if (comment)
+ scalar.comment = comment;
+ return scalar;
+}
+function findScalarTagByName(schema, value, tagName, tagToken, onError) {
+ var _a;
+ if (tagName === '!')
+ return schema[Node.SCALAR]; // non-specific tag
+ const matchWithTest = [];
+ for (const tag of schema.tags) {
+ if (!tag.collection && tag.tag === tagName) {
+ if (tag.default && tag.test)
+ matchWithTest.push(tag);
+ else
+ return tag;
+ }
+ }
+ for (const tag of matchWithTest)
+ if ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value))
+ return tag;
+ const kt = schema.knownTags[tagName];
+ if (kt && !kt.collection) {
+ // Ensure that the known tag is available for stringifying,
+ // but does not get used by default.
+ schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
+ return kt;
+ }
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
+ return schema[Node.SCALAR];
+}
+function findScalarTagByTest({ directives, schema }, value, token, onError) {
+ var _a;
+ const tag = schema.tags.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); }) || schema[Node.SCALAR];
+ if (schema.compat) {
+ const compat = (_a = schema.compat.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); })) !== null && _a !== void 0 ? _a : schema[Node.SCALAR];
+ if (tag.tag !== compat.tag) {
+ const ts = directives.tagString(tag.tag);
+ const cs = directives.tagString(compat.tag);
+ const msg = `Value may be parsed as either ${ts} or ${cs}`;
+ onError(token, 'TAG_RESOLVE_FAILED', msg, true);
+ }
+ }
+ return tag;
+}
+
+exports.composeScalar = composeScalar;
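The scalar composer above decides between an explicit tag, a test-based default tag, and the schema's fallback string tag. A minimal sketch of how that resolution surfaces through the package's public entry points, assuming the yaml 2.x API; the logged codes and values are expectations, not captured output.

```js
// Sketch: scalar tag resolution as seen from the public yaml 2.x API.
const { parse, parseDocument } = require('yaml');

// A plain `3` matches the core int tag via findScalarTagByTest.
console.log(parse('count: 3'));        // { count: 3 }

// An explicit !!str tag goes through findScalarTagByName and keeps the string.
console.log(parse('count: !!str 3'));  // { count: '3' }

// An unknown local tag falls back to the plain string scalar and is reported.
const doc = parseDocument('value: !nope 42');
console.log(doc.warnings.map(w => w.code));  // expected: [ 'TAG_RESOLVE_FAILED' ]
console.log(doc.toJS());                     // { value: '42' } (kept as a string)
```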
diff --git a/node_modules/yaml/dist/compose/composer.d.ts b/node_modules/yaml/dist/compose/composer.d.ts
new file mode 100644
index 0000000..9db2477
--- /dev/null
+++ b/node_modules/yaml/dist/compose/composer.d.ts
@@ -0,0 +1,62 @@
+import { Directives } from '../doc/directives.js';
+import { Document } from '../doc/Document.js';
+import { ErrorCode, YAMLParseError, YAMLWarning } from '../errors.js';
+import { Range } from '../nodes/Node.js';
+import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
+import type { Token } from '../parse/cst.js';
+declare type ErrorSource = number | [number, number] | Range | {
+ offset: number;
+ source?: string;
+};
+export declare type ComposeErrorHandler = (source: ErrorSource, code: ErrorCode, message: string, warning?: boolean) => void;
+/**
+ * Compose a stream of CST nodes into a stream of YAML Documents.
+ *
+ * ```ts
+ * import { Composer, Parser } from 'yaml'
+ *
+ * const src: string = ...
+ * const tokens = new Parser().parse(src)
+ * const docs = new Composer().compose(tokens)
+ * ```
+ */
+export declare class Composer {
+ private directives;
+ private doc;
+ private options;
+ private atDirectives;
+ private prelude;
+ private errors;
+ private warnings;
+ constructor(options?: ParseOptions & DocumentOptions & SchemaOptions);
+ private onError;
+ private decorate;
+ /**
+ * Current stream status information.
+ *
+ * Mostly useful at the end of input for an empty stream.
+ */
+ streamInfo(): {
+ comment: string;
+ directives: Directives;
+ errors: YAMLParseError[];
+ warnings: YAMLWarning[];
+ };
+ /**
+ * Compose tokens into documents.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ compose(tokens: Iterable<Token>, forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+ /** Advance the composer by one CST token. */
+ next(token: Token): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+ /**
+ * Call at end of input to yield any remaining document.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ end(forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+}
+export {};
diff --git a/node_modules/yaml/dist/compose/composer.js b/node_modules/yaml/dist/compose/composer.js
new file mode 100644
index 0000000..2477a49
--- /dev/null
+++ b/node_modules/yaml/dist/compose/composer.js
@@ -0,0 +1,222 @@
+'use strict';
+
+var directives = require('../doc/directives.js');
+var Document = require('../doc/Document.js');
+var errors = require('../errors.js');
+var Node = require('../nodes/Node.js');
+var composeDoc = require('./compose-doc.js');
+var resolveEnd = require('./resolve-end.js');
+
+function getErrorPos(src) {
+ if (typeof src === 'number')
+ return [src, src + 1];
+ if (Array.isArray(src))
+ return src.length === 2 ? src : [src[0], src[1]];
+ const { offset, source } = src;
+ return [offset, offset + (typeof source === 'string' ? source.length : 1)];
+}
+function parsePrelude(prelude) {
+ var _a;
+ let comment = '';
+ let atComment = false;
+ let afterEmptyLine = false;
+ for (let i = 0; i < prelude.length; ++i) {
+ const source = prelude[i];
+ switch (source[0]) {
+ case '#':
+ comment +=
+ (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
+ (source.substring(1) || ' ');
+ atComment = true;
+ afterEmptyLine = false;
+ break;
+ case '%':
+ if (((_a = prelude[i + 1]) === null || _a === void 0 ? void 0 : _a[0]) !== '#')
+ i += 1;
+ atComment = false;
+ break;
+ default:
+ // This may be wrong after doc-end, but in that case it doesn't matter
+ if (!atComment)
+ afterEmptyLine = true;
+ atComment = false;
+ }
+ }
+ return { comment, afterEmptyLine };
+}
+/**
+ * Compose a stream of CST nodes into a stream of YAML Documents.
+ *
+ * ```ts
+ * import { Composer, Parser } from 'yaml'
+ *
+ * const src: string = ...
+ * const tokens = new Parser().parse(src)
+ * const docs = new Composer().compose(tokens)
+ * ```
+ */
+class Composer {
+ constructor(options = {}) {
+ this.doc = null;
+ this.atDirectives = false;
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ this.onError = (source, code, message, warning) => {
+ const pos = getErrorPos(source);
+ if (warning)
+ this.warnings.push(new errors.YAMLWarning(pos, code, message));
+ else
+ this.errors.push(new errors.YAMLParseError(pos, code, message));
+ };
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ this.directives = new directives.Directives({ version: options.version || '1.2' });
+ this.options = options;
+ }
+ decorate(doc, afterDoc) {
+ const { comment, afterEmptyLine } = parsePrelude(this.prelude);
+ //console.log({ dc: doc.comment, prelude, comment })
+ if (comment) {
+ const dc = doc.contents;
+ if (afterDoc) {
+ doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
+ }
+ else if (afterEmptyLine || doc.directives.docStart || !dc) {
+ doc.commentBefore = comment;
+ }
+ else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {
+ let it = dc.items[0];
+ if (Node.isPair(it))
+ it = it.key;
+ const cb = it.commentBefore;
+ it.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ else {
+ const cb = dc.commentBefore;
+ dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ }
+ if (afterDoc) {
+ Array.prototype.push.apply(doc.errors, this.errors);
+ Array.prototype.push.apply(doc.warnings, this.warnings);
+ }
+ else {
+ doc.errors = this.errors;
+ doc.warnings = this.warnings;
+ }
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ }
+ /**
+ * Current stream status information.
+ *
+ * Mostly useful at the end of input for an empty stream.
+ */
+ streamInfo() {
+ return {
+ comment: parsePrelude(this.prelude).comment,
+ directives: this.directives,
+ errors: this.errors,
+ warnings: this.warnings
+ };
+ }
+ /**
+ * Compose tokens into documents.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *compose(tokens, forceDoc = false, endOffset = -1) {
+ for (const token of tokens)
+ yield* this.next(token);
+ yield* this.end(forceDoc, endOffset);
+ }
+ /** Advance the composer by one CST token. */
+ *next(token) {
+ if (process.env.LOG_STREAM)
+ console.dir(token, { depth: null });
+ switch (token.type) {
+ case 'directive':
+ this.directives.add(token.source, (offset, message, warning) => {
+ const pos = getErrorPos(token);
+ pos[0] += offset;
+ this.onError(pos, 'BAD_DIRECTIVE', message, warning);
+ });
+ this.prelude.push(token.source);
+ this.atDirectives = true;
+ break;
+ case 'document': {
+ const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);
+ if (this.atDirectives && !doc.directives.docStart)
+ this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
+ this.decorate(doc, false);
+ if (this.doc)
+ yield this.doc;
+ this.doc = doc;
+ this.atDirectives = false;
+ break;
+ }
+ case 'byte-order-mark':
+ case 'space':
+ break;
+ case 'comment':
+ case 'newline':
+ this.prelude.push(token.source);
+ break;
+ case 'error': {
+ const msg = token.source
+ ? `${token.message}: ${JSON.stringify(token.source)}`
+ : token.message;
+ const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
+ if (this.atDirectives || !this.doc)
+ this.errors.push(error);
+ else
+ this.doc.errors.push(error);
+ break;
+ }
+ case 'doc-end': {
+ if (!this.doc) {
+ const msg = 'Unexpected doc-end without preceding document';
+ this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
+ break;
+ }
+ this.doc.directives.docEnd = true;
+ const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
+ this.decorate(this.doc, true);
+ if (end.comment) {
+ const dc = this.doc.comment;
+ this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
+ }
+ this.doc.range[2] = end.offset;
+ break;
+ }
+ default:
+ this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
+ }
+ }
+ /**
+ * Call at end of input to yield any remaining document.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *end(forceDoc = false, endOffset = -1) {
+ if (this.doc) {
+ this.decorate(this.doc, true);
+ yield this.doc;
+ this.doc = null;
+ }
+ else if (forceDoc) {
+ const opts = Object.assign({ directives: this.directives }, this.options);
+ const doc = new Document.Document(undefined, opts);
+ if (this.atDirectives)
+ this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
+ doc.range = [0, endOffset, endOffset];
+ this.decorate(doc, false);
+ yield doc;
+ }
+ }
+}
+
+exports.Composer = Composer;
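As the class doc above notes, the Composer is meant to be fed CST tokens from the Parser. A small sketch of that streaming flow using only the yaml 2.x exports shown in this diff; the sample input and logged values are illustrative.

```js
// Sketch: composing Parser CST tokens into Document instances (yaml 2.x).
const { Parser, Composer } = require('yaml');

const src = 'a: 1\n---\nb: 2\n';
const tokens = new Parser().parse(src);       // generator of CST tokens
for (const doc of new Composer().compose(tokens)) {
  console.log(doc.toJS());                    // { a: 1 }, then { b: 2 }
  console.log(doc.errors.length, doc.warnings.length);  // 0 0 for this input
}

// With no document in the stream, streamInfo() still exposes the prelude.
const composer = new Composer();
Array.from(composer.compose(new Parser().parse('# comment only\n')));
console.log(composer.streamInfo().comment);   // ' comment only' (leading '#' stripped)
```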
diff --git a/node_modules/yaml/dist/compose/resolve-block-map.d.ts b/node_modules/yaml/dist/compose/resolve-block-map.d.ts
new file mode 100644
index 0000000..dbcd3fa
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-map.d.ts
@@ -0,0 +1,6 @@
+import type { ParsedNode } from '../nodes/Node.js';
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import type { BlockMap } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler): YAMLMap.Parsed<ParsedNode, ParsedNode | null>;
diff --git a/node_modules/yaml/dist/compose/resolve-block-map.js b/node_modules/yaml/dist/compose/resolve-block-map.js
new file mode 100644
index 0000000..c3fe91a
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-map.js
@@ -0,0 +1,109 @@
+'use strict';
+
+var Pair = require('../nodes/Pair.js');
+var YAMLMap = require('../nodes/YAMLMap.js');
+var resolveProps = require('./resolve-props.js');
+var utilContainsNewline = require('./util-contains-newline.js');
+var utilFlowIndentCheck = require('./util-flow-indent-check.js');
+var utilMapIncludes = require('./util-map-includes.js');
+
+const startColMsg = 'All mapping items must start at the same column';
+function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
+ var _a;
+ const map = new YAMLMap.YAMLMap(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bm.offset;
+ for (const collItem of bm.items) {
+ const { start, key, sep, value } = collItem;
+ // key properties
+ const keyProps = resolveProps.resolveProps(start, {
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ const implicitKey = !keyProps.found;
+ if (implicitKey) {
+ if (key) {
+ if (key.type === 'block-seq')
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
+ else if ('indent' in key && key.indent !== bm.indent)
+ onError(offset, 'BAD_INDENT', startColMsg);
+ }
+ if (!keyProps.anchor && !keyProps.tag && !sep) {
+ // TODO: assert being at last item?
+ if (keyProps.comment) {
+ if (map.comment)
+ map.comment += '\n' + keyProps.comment;
+ else
+ map.comment = keyProps.comment;
+ }
+ continue;
+ }
+ }
+ else if (((_a = keyProps.found) === null || _a === void 0 ? void 0 : _a.indent) !== bm.indent)
+ onError(offset, 'BAD_INDENT', startColMsg);
+ if (implicitKey && utilContainsNewline.containsNewline(key))
+ onError(key, // checked by containsNewline()
+ 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
+ // key value
+ const keyStart = keyProps.end;
+ const keyNode = key
+ ? composeNode(ctx, key, keyProps, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);
+ if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ // value properties
+ const valueProps = resolveProps.resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: !key || key.type === 'block-scalar'
+ });
+ offset = valueProps.end;
+ if (valueProps.found) {
+ if (implicitKey) {
+ if ((value === null || value === void 0 ? void 0 : value.type) === 'block-map' && !valueProps.hasNewline)
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
+ if (ctx.options.strict &&
+ keyProps.start < valueProps.found.offset - 1024)
+ onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
+ }
+ // value value
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);
+ offset = valueNode.range[2];
+ const pair = new Pair.Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ else {
+ // key with no value
+ if (implicitKey)
+ onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
+ if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair.Pair(keyNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ }
+ map.range = [bm.offset, offset, offset];
+ return map;
+}
+
+exports.resolveBlockMap = resolveBlockMap;
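resolveBlockMap delegates duplicate-key detection to mapIncludes, which honours the `uniqueKeys` option shown later in this diff. A hedged sketch of the observable effect, assuming the yaml 2.x default of `uniqueKeys: true`:

```js
// Sketch: duplicate block-map keys with and without uniqueKeys (yaml 2.x).
const { parseDocument } = require('yaml');

// With the default uniqueKeys: true, the second `a:` is rejected.
const strictDoc = parseDocument('a: 1\na: 2\n');
console.log(strictDoc.errors.map(e => e.code));   // expected: [ 'DUPLICATE_KEY' ]

// Disabling the check keeps both pairs in the map's items.
const looseDoc = parseDocument('a: 1\na: 2\n', { uniqueKeys: false });
console.log(looseDoc.errors.length);              // 0
console.log(looseDoc.contents.items.length);      // 2
```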
diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
new file mode 100644
index 0000000..4855b19
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
@@ -0,0 +1,10 @@
+import { Range } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockScalar(scalar: BlockScalar, strict: boolean, onError: ComposeErrorHandler): {
+ value: string;
+ type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null;
+ comment: string;
+ range: Range;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.js b/node_modules/yaml/dist/compose/resolve-block-scalar.js
new file mode 100644
index 0000000..b5a350d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-scalar.js
@@ -0,0 +1,196 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+
+function resolveBlockScalar(scalar, strict, onError) {
+ const start = scalar.offset;
+ const header = parseBlockScalarHeader(scalar, strict, onError);
+ if (!header)
+ return { value: '', type: null, comment: '', range: [start, start, start] };
+ const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;
+ const lines = scalar.source ? splitLines(scalar.source) : [];
+ // determine the end of content & start of chomping
+ let chompStart = lines.length;
+ for (let i = lines.length - 1; i >= 0; --i) {
+ const content = lines[i][1];
+ if (content === '' || content === '\r')
+ chompStart = i;
+ else
+ break;
+ }
+ // shortcut for empty contents
+ if (chompStart === 0) {
+ const value = header.chomp === '+' && lines.length > 0
+ ? '\n'.repeat(Math.max(1, lines.length - 1))
+ : '';
+ let end = start + header.length;
+ if (scalar.source)
+ end += scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+ }
+ // find the indentation level to trim from start
+ let trimIndent = scalar.indent + header.indent;
+ let offset = scalar.offset + header.length;
+ let contentStart = 0;
+ for (let i = 0; i < chompStart; ++i) {
+ const [indent, content] = lines[i];
+ if (content === '' || content === '\r') {
+ if (header.indent === 0 && indent.length > trimIndent)
+ trimIndent = indent.length;
+ }
+ else {
+ if (indent.length < trimIndent) {
+ const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
+ onError(offset + indent.length, 'MISSING_CHAR', message);
+ }
+ if (header.indent === 0)
+ trimIndent = indent.length;
+ contentStart = i;
+ break;
+ }
+ offset += indent.length + content.length + 1;
+ }
+ // include trailing more-indented empty lines in content
+ for (let i = lines.length - 1; i >= chompStart; --i) {
+ if (lines[i][0].length > trimIndent)
+ chompStart = i + 1;
+ }
+ let value = '';
+ let sep = '';
+ let prevMoreIndented = false;
+ // leading whitespace is kept intact
+ for (let i = 0; i < contentStart; ++i)
+ value += lines[i][0].slice(trimIndent) + '\n';
+ for (let i = contentStart; i < chompStart; ++i) {
+ let [indent, content] = lines[i];
+ offset += indent.length + content.length + 1;
+ const crlf = content[content.length - 1] === '\r';
+ if (crlf)
+ content = content.slice(0, -1);
+ /* istanbul ignore if already caught in lexer */
+ if (content && indent.length < trimIndent) {
+ const src = header.indent
+ ? 'explicit indentation indicator'
+ : 'first line';
+ const message = `Block scalar lines must not be less indented than their ${src}`;
+ onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
+ indent = '';
+ }
+ if (type === Scalar.Scalar.BLOCK_LITERAL) {
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ }
+ else if (indent.length > trimIndent || content[0] === '\t') {
+ // more-indented content within a folded block
+ if (sep === ' ')
+ sep = '\n';
+ else if (!prevMoreIndented && sep === '\n')
+ sep = '\n\n';
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ prevMoreIndented = true;
+ }
+ else if (content === '') {
+ // empty line
+ if (sep === '\n')
+ value += '\n';
+ else
+ sep = '\n';
+ }
+ else {
+ value += sep + content;
+ sep = ' ';
+ prevMoreIndented = false;
+ }
+ }
+ switch (header.chomp) {
+ case '-':
+ break;
+ case '+':
+ for (let i = chompStart; i < lines.length; ++i)
+ value += '\n' + lines[i][0].slice(trimIndent);
+ if (value[value.length - 1] !== '\n')
+ value += '\n';
+ break;
+ default:
+ value += '\n';
+ }
+ const end = start + header.length + scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+}
+function parseBlockScalarHeader({ offset, props }, strict, onError) {
+ /* istanbul ignore if should not happen */
+ if (props[0].type !== 'block-scalar-header') {
+ onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
+ return null;
+ }
+ const { source } = props[0];
+ const mode = source[0];
+ let indent = 0;
+ let chomp = '';
+ let error = -1;
+ for (let i = 1; i < source.length; ++i) {
+ const ch = source[i];
+ if (!chomp && (ch === '-' || ch === '+'))
+ chomp = ch;
+ else {
+ const n = Number(ch);
+ if (!indent && n)
+ indent = n;
+ else if (error === -1)
+ error = offset + i;
+ }
+ }
+ if (error !== -1)
+ onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
+ let hasSpace = false;
+ let comment = '';
+ let length = source.length;
+ for (let i = 1; i < props.length; ++i) {
+ const token = props[i];
+ switch (token.type) {
+ case 'space':
+ hasSpace = true;
+ // fallthrough
+ case 'newline':
+ length += token.source.length;
+ break;
+ case 'comment':
+ if (strict && !hasSpace) {
+ const message = 'Comments must be separated from other tokens by white space characters';
+ onError(token, 'MISSING_CHAR', message);
+ }
+ length += token.source.length;
+ comment = token.source.substring(1);
+ break;
+ case 'error':
+ onError(token, 'UNEXPECTED_TOKEN', token.message);
+ length += token.source.length;
+ break;
+ /* istanbul ignore next should not happen */
+ default: {
+ const message = `Unexpected token in block scalar header: ${token.type}`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ const ts = token.source;
+ if (ts && typeof ts === 'string')
+ length += ts.length;
+ }
+ }
+ }
+ return { mode, indent, chomp, comment, length };
+}
+/** @returns Array of lines split up as `[indent, content]` */
+function splitLines(source) {
+ const split = source.split(/\n( *)/);
+ const first = split[0];
+ const m = first.match(/^( *)/);
+ const line0 = (m === null || m === void 0 ? void 0 : m[1])
+ ? [m[1], first.slice(m[1].length)]
+ : ['', first];
+ const lines = [line0];
+ for (let i = 1; i < split.length; i += 2)
+ lines.push([split[i], split[i + 1]]);
+ return lines;
+}
+
+exports.resolveBlockScalar = resolveBlockScalar;
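The block-scalar resolver above implements YAML's folding and chomping rules. A brief sketch of the resulting values via the public `parse` function, assuming yaml 2.x:

```js
// Sketch: block scalar folding ('>') vs literal ('|') and chomping indicators.
const { parse } = require('yaml');

// Folded: single line breaks become spaces; default "clip" chomping keeps one final \n.
console.log(JSON.stringify(parse('>\n a\n b\n')));   // "a b\n"

// Literal with strip chomping ('-'): line breaks kept, trailing newline removed.
console.log(JSON.stringify(parse('|-\n a\n b\n')));  // "a\nb"

// Keep chomping ('+'): trailing empty lines are preserved in the value.
console.log(JSON.stringify(parse('|+\n a\n\n')));    // "a\n\n"
```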
diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.d.ts b/node_modules/yaml/dist/compose/resolve-block-seq.d.ts
new file mode 100644
index 0000000..25ed558
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-seq.d.ts
@@ -0,0 +1,5 @@
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { BlockSequence } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler): YAMLSeq.Parsed<import("../index.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.js b/node_modules/yaml/dist/compose/resolve-block-seq.js
new file mode 100644
index 0000000..1e7e86a
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-seq.js
@@ -0,0 +1,47 @@
+'use strict';
+
+var YAMLSeq = require('../nodes/YAMLSeq.js');
+var resolveProps = require('./resolve-props.js');
+var utilFlowIndentCheck = require('./util-flow-indent-check.js');
+
+function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
+ const seq = new YAMLSeq.YAMLSeq(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bs.offset;
+ for (const { start, value } of bs.items) {
+ const props = resolveProps.resolveProps(start, {
+ indicator: 'seq-item-ind',
+ next: value,
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ offset = props.end;
+ if (!props.found) {
+ if (props.anchor || props.tag || value) {
+ if (value && value.type === 'block-seq')
+ onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');
+ else
+ onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
+ }
+ else {
+ // TODO: assert being at last item?
+ if (props.comment)
+ seq.comment = props.comment;
+ continue;
+ }
+ }
+ const node = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, offset, start, null, props, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);
+ offset = node.range[2];
+ seq.items.push(node);
+ }
+ seq.range = [bs.offset, offset, offset];
+ return seq;
+}
+
+exports.resolveBlockSeq = resolveBlockSeq;
diff --git a/node_modules/yaml/dist/compose/resolve-end.d.ts b/node_modules/yaml/dist/compose/resolve-end.d.ts
new file mode 100644
index 0000000..bb2d0b8
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-end.d.ts
@@ -0,0 +1,6 @@
+import type { SourceToken } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): {
+ comment: string;
+ offset: number;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-end.js b/node_modules/yaml/dist/compose/resolve-end.js
new file mode 100644
index 0000000..3a58347
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-end.js
@@ -0,0 +1,39 @@
+'use strict';
+
+function resolveEnd(end, offset, reqSpace, onError) {
+ let comment = '';
+ if (end) {
+ let hasSpace = false;
+ let sep = '';
+ for (const token of end) {
+ const { source, type } = token;
+ switch (type) {
+ case 'space':
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (reqSpace && !hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += sep + cb;
+ sep = '';
+ break;
+ }
+ case 'newline':
+ if (comment)
+ sep += source;
+ hasSpace = true;
+ break;
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
+ }
+ offset += source.length;
+ }
+ }
+ return { comment, offset };
+}
+
+exports.resolveEnd = resolveEnd;
diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts b/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
new file mode 100644
index 0000000..06d32f5
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
@@ -0,0 +1,6 @@
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { FlowCollection } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler): YAMLMap.Parsed<import("../nodes/Node.js").ParsedNode, import("../nodes/Node.js").ParsedNode | null> | YAMLSeq.Parsed<import("../nodes/Node.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.js b/node_modules/yaml/dist/compose/resolve-flow-collection.js
new file mode 100644
index 0000000..cf848d9
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-collection.js
@@ -0,0 +1,203 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Pair = require('../nodes/Pair.js');
+var YAMLMap = require('../nodes/YAMLMap.js');
+var YAMLSeq = require('../nodes/YAMLSeq.js');
+var resolveEnd = require('./resolve-end.js');
+var resolveProps = require('./resolve-props.js');
+var utilContainsNewline = require('./util-contains-newline.js');
+var utilMapIncludes = require('./util-map-includes.js');
+
+const blockMsg = 'Block collections are not allowed within flow collections';
+const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
+function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
+ var _a;
+ const isMap = fc.start.source === '{';
+ const fcName = isMap ? 'flow map' : 'flow sequence';
+ const coll = isMap
+ ? new YAMLMap.YAMLMap(ctx.schema)
+ : new YAMLSeq.YAMLSeq(ctx.schema);
+ coll.flow = true;
+ const atRoot = ctx.atRoot;
+ if (atRoot)
+ ctx.atRoot = false;
+ let offset = fc.offset + fc.start.source.length;
+ for (let i = 0; i < fc.items.length; ++i) {
+ const collItem = fc.items[i];
+ const { start, key, sep, value } = collItem;
+ const props = resolveProps.resolveProps(start, {
+ flow: fcName,
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: false
+ });
+ if (!props.found) {
+ if (!props.anchor && !props.tag && !sep && !value) {
+ if (i === 0 && props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ else if (i < fc.items.length - 1)
+ onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
+ if (props.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + props.comment;
+ else
+ coll.comment = props.comment;
+ }
+ offset = props.end;
+ continue;
+ }
+ if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))
+ onError(key, // checked by containsNewline()
+ 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ }
+ if (i === 0) {
+ if (props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ }
+ else {
+ if (!props.comma)
+ onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
+ if (props.comment) {
+ let prevItemComment = '';
+ loop: for (const st of start) {
+ switch (st.type) {
+ case 'comma':
+ case 'space':
+ break;
+ case 'comment':
+ prevItemComment = st.source.substring(1);
+ break loop;
+ default:
+ break loop;
+ }
+ }
+ if (prevItemComment) {
+ let prev = coll.items[coll.items.length - 1];
+ if (Node.isPair(prev))
+ prev = (_a = prev.value) !== null && _a !== void 0 ? _a : prev.key;
+ if (prev.comment)
+ prev.comment += '\n' + prevItemComment;
+ else
+ prev.comment = prevItemComment;
+ props.comment = props.comment.substring(prevItemComment.length + 1);
+ }
+ }
+ }
+ if (!isMap && !sep && !props.found) {
+ // item is a value in a seq
+ // → key & sep are empty, start does not include ? or :
+ const valueNode = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, props.end, sep, null, props, onError);
+ coll.items.push(valueNode);
+ offset = valueNode.range[2];
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else {
+ // item is a key+value pair
+ // key value
+ const keyStart = props.end;
+ const keyNode = key
+ ? composeNode(ctx, key, props, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, props, onError);
+ if (isBlock(key))
+ onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ // value properties
+ const valueProps = resolveProps.resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ flow: fcName,
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: false
+ });
+ if (valueProps.found) {
+ if (!isMap && !props.found && ctx.options.strict) {
+ if (sep)
+ for (const st of sep) {
+ if (st === valueProps.found)
+ break;
+ if (st.type === 'newline') {
+ onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ break;
+ }
+ }
+ if (props.start < valueProps.found.offset - 1024)
+ onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
+ }
+ }
+ else if (value) {
+ if ('source' in value && value.source && value.source[0] === ':')
+ onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
+ else
+ onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
+ }
+ // value value
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : valueProps.found
+ ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
+ : null;
+ if (valueNode) {
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair.Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ if (isMap) {
+ const map = coll;
+ if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ map.items.push(pair);
+ }
+ else {
+ const map = new YAMLMap.YAMLMap(ctx.schema);
+ map.flow = true;
+ map.items.push(pair);
+ coll.items.push(map);
+ }
+ offset = valueNode ? valueNode.range[2] : valueProps.end;
+ }
+ }
+ const expectedEnd = isMap ? '}' : ']';
+ const [ce, ...ee] = fc.end;
+ let cePos = offset;
+ if (ce && ce.source === expectedEnd)
+ cePos = ce.offset + ce.source.length;
+ else {
+ const name = fcName[0].toUpperCase() + fcName.substring(1);
+ const msg = atRoot
+ ? `${name} must end with a ${expectedEnd}`
+ : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
+ onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
+ if (ce && ce.source.length !== 1)
+ ee.unshift(ce);
+ }
+ if (ee.length > 0) {
+ const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);
+ if (end.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + end.comment;
+ else
+ coll.comment = end.comment;
+ }
+ coll.range = [fc.offset, cePos, end.offset];
+ }
+ else {
+ coll.range = [fc.offset, cePos, cePos];
+ }
+ return coll;
+}
+
+exports.resolveFlowCollection = resolveFlowCollection;
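Two behaviours of the flow-collection resolver are easy to miss: a `key: value` entry inside a flow sequence is wrapped in its own single-pair map, and a missing closing bracket is reported rather than silently accepted. A sketch through the yaml 2.x public API; the error wording in the comment is indicative only.

```js
// Sketch: flow-collection composition via the public yaml 2.x API.
const { parse, parseDocument } = require('yaml');

// Each `key: value` pair in a flow sequence becomes its own one-entry map.
console.log(parse('[a: 1, b, c: 3]'));
// -> [ { a: 1 }, 'b', { c: 3 } ]

// An unterminated flow map at the document root is reported as an error.
const doc = parseDocument('{ a: 1');
console.log(doc.errors.length > 0);   // true (roughly: "Flow map must end with a }")
```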
diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
new file mode 100644
index 0000000..0c9204d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
@@ -0,0 +1,10 @@
+import { Range } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import type { FlowScalar } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): {
+ value: string;
+ type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null;
+ comment: string;
+ range: Range;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.js b/node_modules/yaml/dist/compose/resolve-flow-scalar.js
new file mode 100644
index 0000000..f7bf4e5
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-scalar.js
@@ -0,0 +1,226 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+var resolveEnd = require('./resolve-end.js');
+
+function resolveFlowScalar(scalar, strict, onError) {
+ const { offset, type, source, end } = scalar;
+ let _type;
+ let value;
+ const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
+ switch (type) {
+ case 'scalar':
+ _type = Scalar.Scalar.PLAIN;
+ value = plainValue(source, _onError);
+ break;
+ case 'single-quoted-scalar':
+ _type = Scalar.Scalar.QUOTE_SINGLE;
+ value = singleQuotedValue(source, _onError);
+ break;
+ case 'double-quoted-scalar':
+ _type = Scalar.Scalar.QUOTE_DOUBLE;
+ value = doubleQuotedValue(source, _onError);
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
+ return {
+ value: '',
+ type: null,
+ comment: '',
+ range: [offset, offset + source.length, offset + source.length]
+ };
+ }
+ const valueEnd = offset + source.length;
+ const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);
+ return {
+ value,
+ type: _type,
+ comment: re.comment,
+ range: [offset, valueEnd, re.offset]
+ };
+}
+function plainValue(source, onError) {
+ let badChar = '';
+ switch (source[0]) {
+ /* istanbul ignore next should not happen */
+ case '\t':
+ badChar = 'a tab character';
+ break;
+ case ',':
+ badChar = 'flow indicator character ,';
+ break;
+ case '%':
+ badChar = 'directive indicator character %';
+ break;
+ case '|':
+ case '>': {
+ badChar = `block scalar indicator ${source[0]}`;
+ break;
+ }
+ case '@':
+ case '`': {
+ badChar = `reserved character ${source[0]}`;
+ break;
+ }
+ }
+ if (badChar)
+ onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
+ return foldLines(source);
+}
+function singleQuotedValue(source, onError) {
+ if (source[source.length - 1] !== "'" || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
+ return foldLines(source.slice(1, -1)).replace(/''/g, "'");
+}
+function foldLines(source) {
+ var _a;
+ /**
+ * The negative lookbehind here and in the `re` RegExp is to
+ * prevent causing a polynomial search time in certain cases.
+ *
+ * The try-catch is for Safari, which doesn't support this yet:
+ * https://caniuse.com/js-regexp-lookbehind
+ */
+ let first, line;
+ try {
+ first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
+ line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
+ }
+ catch (_) {
+ first = /(.*?)[ \t]*\r?\n/sy;
+ line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
+ }
+ let match = first.exec(source);
+ if (!match)
+ return source;
+ let res = match[1];
+ let sep = ' ';
+ let pos = first.lastIndex;
+ line.lastIndex = pos;
+ while ((match = line.exec(source))) {
+ if (match[1] === '') {
+ if (sep === '\n')
+ res += sep;
+ else
+ sep = '\n';
+ }
+ else {
+ res += sep + match[1];
+ sep = ' ';
+ }
+ pos = line.lastIndex;
+ }
+ const last = /[ \t]*(.*)/sy;
+ last.lastIndex = pos;
+ match = last.exec(source);
+ return res + sep + ((_a = match === null || match === void 0 ? void 0 : match[1]) !== null && _a !== void 0 ? _a : '');
+}
+function doubleQuotedValue(source, onError) {
+ let res = '';
+ for (let i = 1; i < source.length - 1; ++i) {
+ const ch = source[i];
+ if (ch === '\r' && source[i + 1] === '\n')
+ continue;
+ if (ch === '\n') {
+ const { fold, offset } = foldNewline(source, i);
+ res += fold;
+ i = offset;
+ }
+ else if (ch === '\\') {
+ let next = source[++i];
+ const cc = escapeCodes[next];
+ if (cc)
+ res += cc;
+ else if (next === '\n') {
+ // skip escaped newlines, but still trim the following line
+ next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === '\r' && source[i + 1] === '\n') {
+ // skip escaped CRLF newlines, but still trim the following line
+ next = source[++i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === 'x' || next === 'u' || next === 'U') {
+ const length = { x: 2, u: 4, U: 8 }[next];
+ res += parseCharCode(source, i + 1, length, onError);
+ i += length;
+ }
+ else {
+ const raw = source.substr(i - 1, 2);
+ onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ res += raw;
+ }
+ }
+ else if (ch === ' ' || ch === '\t') {
+ // trim trailing whitespace
+ const wsStart = i;
+ let next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
+ res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
+ }
+ else {
+ res += ch;
+ }
+ }
+ if (source[source.length - 1] !== '"' || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
+ return res;
+}
+/**
+ * Fold a single newline into a space, multiple newlines to N - 1 newlines.
+ * Presumes `source[offset] === '\n'`
+ */
+function foldNewline(source, offset) {
+ let fold = '';
+ let ch = source[offset + 1];
+ while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
+ if (ch === '\r' && source[offset + 2] !== '\n')
+ break;
+ if (ch === '\n')
+ fold += '\n';
+ offset += 1;
+ ch = source[offset + 1];
+ }
+ if (!fold)
+ fold = ' ';
+ return { fold, offset };
+}
+const escapeCodes = {
+ '0': '\0',
+ a: '\x07',
+ b: '\b',
+ e: '\x1b',
+ f: '\f',
+ n: '\n',
+ r: '\r',
+ t: '\t',
+ v: '\v',
+ N: '\u0085',
+ _: '\u00a0',
+ L: '\u2028',
+ P: '\u2029',
+ ' ': ' ',
+ '"': '"',
+ '/': '/',
+ '\\': '\\',
+ '\t': '\t'
+};
+function parseCharCode(source, offset, length, onError) {
+ const cc = source.substr(offset, length);
+ const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
+ const code = ok ? parseInt(cc, 16) : NaN;
+ if (isNaN(code)) {
+ const raw = source.substr(offset - 2, length + 2);
+ onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ return raw;
+ }
+ return String.fromCodePoint(code);
+}
+
+exports.resolveFlowScalar = resolveFlowScalar;
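resolveFlowScalar handles plain-scalar line folding and the double-quoted escape table above. A short sketch of the resulting values, assuming yaml 2.x; the error code matches the source above, the printed values are expectations.

```js
// Sketch: flow scalar folding and double-quoted escapes (yaml 2.x).
const { parse, parseDocument } = require('yaml');

// Escapes from the table above: \t and \u00e9 resolve inside double quotes.
console.log(parse('"a\\tb\\u00e9"'));               // 'a\tbé'

// Multi-line plain scalars are folded: single line breaks become spaces.
console.log(parse('key: some\n folded\n text\n'));  // { key: 'some folded text' }

// An invalid escape is kept verbatim and reported as BAD_DQ_ESCAPE.
const doc = parseDocument('"\\q"');
console.log(doc.errors.map(e => e.code));           // expected: [ 'BAD_DQ_ESCAPE' ]
console.log(doc.toJS());                            // '\q' (escape kept verbatim)
```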
diff --git a/node_modules/yaml/dist/compose/resolve-props.d.ts b/node_modules/yaml/dist/compose/resolve-props.d.ts
new file mode 100644
index 0000000..e324d0c
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-props.d.ts
@@ -0,0 +1,21 @@
+import type { SourceToken, Token } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export interface ResolvePropsArg {
+ flow?: 'flow map' | 'flow sequence';
+ indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind';
+ next: Token | null | undefined;
+ offset: number;
+ onError: ComposeErrorHandler;
+ startOnNewline: boolean;
+}
+export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, startOnNewline }: ResolvePropsArg): {
+ comma: SourceToken | null;
+ found: SourceToken | null;
+ spaceBefore: boolean;
+ comment: string;
+ hasNewline: boolean;
+ anchor: SourceToken | null;
+ tag: SourceToken | null;
+ end: number;
+ start: number;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-props.js b/node_modules/yaml/dist/compose/resolve-props.js
new file mode 100644
index 0000000..8a20e01
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-props.js
@@ -0,0 +1,132 @@
+'use strict';
+
+function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
+ let spaceBefore = false;
+ let atNewline = startOnNewline;
+ let hasSpace = startOnNewline;
+ let comment = '';
+ let commentSep = '';
+ let hasNewline = false;
+ let reqSpace = false;
+ let anchor = null;
+ let tag = null;
+ let comma = null;
+ let found = null;
+ let start = null;
+ for (const token of tokens) {
+ if (reqSpace) {
+ if (token.type !== 'space' &&
+ token.type !== 'newline' &&
+ token.type !== 'comma')
+ onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ reqSpace = false;
+ }
+ switch (token.type) {
+ case 'space':
+ // At the doc level, tabs at line start may be parsed
+ // as leading white space rather than indentation.
+ // In a flow collection, only the parser handles indent.
+ if (!flow &&
+ atNewline &&
+ indicator !== 'doc-start' &&
+ token.source[0] === '\t')
+ onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (!hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = token.source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += commentSep + cb;
+ commentSep = '';
+ atNewline = false;
+ break;
+ }
+ case 'newline':
+ if (atNewline) {
+ if (comment)
+ comment += token.source;
+ else
+ spaceBefore = true;
+ }
+ else
+ commentSep += token.source;
+ atNewline = true;
+ hasNewline = true;
+ hasSpace = true;
+ break;
+ case 'anchor':
+ if (anchor)
+ onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
+ if (token.source.endsWith(':'))
+ onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
+ anchor = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ case 'tag': {
+ if (tag)
+ onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
+ tag = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ }
+ case indicator:
+ // Could here handle preceding comments differently
+ if (anchor || tag)
+ onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
+ if (found)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow !== null && flow !== void 0 ? flow : 'collection'}`);
+ found = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ case 'comma':
+ if (flow) {
+ if (comma)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
+ comma = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ }
+ // else fallthrough
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
+ atNewline = false;
+ hasSpace = false;
+ }
+ }
+ const last = tokens[tokens.length - 1];
+ const end = last ? last.offset + last.source.length : offset;
+ if (reqSpace &&
+ next &&
+ next.type !== 'space' &&
+ next.type !== 'newline' &&
+ next.type !== 'comma' &&
+ (next.type !== 'scalar' || next.source !== ''))
+ onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ return {
+ comma,
+ found,
+ spaceBefore,
+ comment,
+ hasNewline,
+ anchor,
+ tag,
+ end,
+ start: start !== null && start !== void 0 ? start : end
+ };
+}
+
+exports.resolveProps = resolveProps;
diff --git a/node_modules/yaml/dist/compose/util-contains-newline.d.ts b/node_modules/yaml/dist/compose/util-contains-newline.d.ts
new file mode 100644
index 0000000..8155be0
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-contains-newline.d.ts
@@ -0,0 +1,2 @@
+import type { Token } from '../parse/cst.js';
+export declare function containsNewline(key: Token | null | undefined): boolean | null;
diff --git a/node_modules/yaml/dist/compose/util-contains-newline.js b/node_modules/yaml/dist/compose/util-contains-newline.js
new file mode 100644
index 0000000..e7aa82d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-contains-newline.js
@@ -0,0 +1,36 @@
+'use strict';
+
+function containsNewline(key) {
+ if (!key)
+ return null;
+ switch (key.type) {
+ case 'alias':
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ if (key.source.includes('\n'))
+ return true;
+ if (key.end)
+ for (const st of key.end)
+ if (st.type === 'newline')
+ return true;
+ return false;
+ case 'flow-collection':
+ for (const it of key.items) {
+ for (const st of it.start)
+ if (st.type === 'newline')
+ return true;
+ if (it.sep)
+ for (const st of it.sep)
+ if (st.type === 'newline')
+ return true;
+ if (containsNewline(it.key) || containsNewline(it.value))
+ return true;
+ }
+ return false;
+ default:
+ return true;
+ }
+}
+
+exports.containsNewline = containsNewline;
diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts b/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
new file mode 100644
index 0000000..90499b8
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
@@ -0,0 +1,2 @@
+import type { Token } from '../parse/cst.js';
+export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number;
diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.js b/node_modules/yaml/dist/compose/util-empty-scalar-position.js
new file mode 100644
index 0000000..32951b3
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-empty-scalar-position.js
@@ -0,0 +1,29 @@
+'use strict';
+
+function emptyScalarPosition(offset, before, pos) {
+ if (before) {
+ if (pos === null)
+ pos = before.length;
+ for (let i = pos - 1; i >= 0; --i) {
+ let st = before[i];
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ offset -= st.source.length;
+ continue;
+ }
+ // Technically, an empty scalar is immediately after the last non-empty
+ // node, but it's more useful to place it after any whitespace.
+ st = before[++i];
+ while ((st === null || st === void 0 ? void 0 : st.type) === 'space') {
+ offset += st.source.length;
+ st = before[++i];
+ }
+ break;
+ }
+ }
+ return offset;
+}
+
+exports.emptyScalarPosition = emptyScalarPosition;
diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts b/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
new file mode 100644
index 0000000..64ed1fc
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
@@ -0,0 +1,3 @@
+import { Token } from '../parse/cst';
+import { ComposeErrorHandler } from './composer';
+export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void;
diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.js b/node_modules/yaml/dist/compose/util-flow-indent-check.js
new file mode 100644
index 0000000..aa7c998
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-flow-indent-check.js
@@ -0,0 +1,17 @@
+'use strict';
+
+var utilContainsNewline = require('./util-contains-newline.js');
+
+function flowIndentCheck(indent, fc, onError) {
+ if ((fc === null || fc === void 0 ? void 0 : fc.type) === 'flow-collection') {
+ const end = fc.end[0];
+ if (end.indent === indent &&
+ (end.source === ']' || end.source === '}') &&
+ utilContainsNewline.containsNewline(fc)) {
+ const msg = 'Flow end indicator should be more indented than parent';
+ onError(end, 'BAD_INDENT', msg, true);
+ }
+ }
+}
+
+exports.flowIndentCheck = flowIndentCheck;
diff --git a/node_modules/yaml/dist/compose/util-map-includes.d.ts b/node_modules/yaml/dist/compose/util-map-includes.d.ts
new file mode 100644
index 0000000..5d2c4b3
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-map-includes.d.ts
@@ -0,0 +1,4 @@
+import { ParsedNode } from '../nodes/Node';
+import { Pair } from '../nodes/Pair';
+import { ComposeContext } from './compose-node';
+export declare function mapIncludes(ctx: ComposeContext, items: Pair<ParsedNode>[], search: ParsedNode): boolean;
diff --git a/node_modules/yaml/dist/compose/util-map-includes.js b/node_modules/yaml/dist/compose/util-map-includes.js
new file mode 100644
index 0000000..ab03be1
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-map-includes.js
@@ -0,0 +1,19 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+
+function mapIncludes(ctx, items, search) {
+ const { uniqueKeys } = ctx.options;
+ if (uniqueKeys === false)
+ return false;
+ const isEqual = typeof uniqueKeys === 'function'
+ ? uniqueKeys
+ : (a, b) => a === b ||
+ (Node.isScalar(a) &&
+ Node.isScalar(b) &&
+ a.value === b.value &&
+ !(a.value === '<<' && ctx.schema.merge));
+ return items.some(pair => isEqual(pair.key, search));
+}
+
+exports.mapIncludes = mapIncludes;
diff --git a/node_modules/yaml/dist/doc/Document.d.ts b/node_modules/yaml/dist/doc/Document.d.ts
new file mode 100644
index 0000000..a3a7e34
--- /dev/null
+++ b/node_modules/yaml/dist/doc/Document.d.ts
@@ -0,0 +1,139 @@
+import type { YAMLError, YAMLWarning } from '../errors.js';
+import { Alias } from '../nodes/Alias.js';
+import { Node, NODE_TYPE, ParsedNode, Range } from '../nodes/Node.js';
+import { Pair } from '../nodes/Pair.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { YAMLMap } from '../nodes/YAMLMap.js';
+import type { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options.js';
+import { Schema } from '../schema/Schema.js';
+import { Directives } from './directives.js';
+export declare type Replacer = any[] | ((key: any, value: any) => unknown);
+export declare namespace Document {
+ interface Parsed<T extends ParsedNode = ParsedNode> extends Document<T> {
+ directives: Directives;
+ range: Range;
+ }
+}
+export declare class Document<T = unknown> {
+ readonly [NODE_TYPE]: symbol;
+ /** A comment before this Document */
+ commentBefore: string | null;
+ /** A comment immediately after this Document */
+ comment: string | null;
+ /** The document contents. */
+ contents: T | null;
+ directives?: Directives;
+ /** Errors encountered during parsing. */
+ errors: YAMLError[];
+ options: Required<Omit<ParseOptions & DocumentOptions, 'lineCounter' | 'directives' | 'version'>>;
+ /**
+ * The `[start, value-end, node-end]` character offsets for the part of the
+ * source parsed into this document (undefined if not parsed). The `value-end`
+ * and `node-end` positions are themselves not included in their respective
+ * ranges.
+ */
+ range?: Range;
+ /** The schema used with the document. Use `setSchema()` to change. */
+ schema: Schema;
+ /** Warnings encountered during parsing. */
+ warnings: YAMLWarning[];
+ /**
+ * @param value - The initial value for the document, which will be wrapped
+ * in a Node container.
+ */
+ constructor(value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
+ constructor(value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
+ /**
+ * Create a deep copy of this Document and its contents.
+ *
+ * Custom Node values that inherit from `Object` still refer to their original instances.
+ */
+ clone(): Document<T>;
+ /** Adds a value to the document. */
+ add(value: any): void;
+ /** Adds a value to the document. */
+ addIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
+ *
+ * If `node` already has an anchor, `name` is ignored.
+ * Otherwise, the `node.anchor` value will be set to `name`,
+ * or if an anchor with that name is already present in the document,
+ * `name` will be used as a prefix for a new unique anchor.
+ * If `name` is undefined, the generated anchor will use 'a' as a prefix.
+ */
+ createAlias(node: Scalar | YAMLMap | YAMLSeq, name?: string): Alias;
+ /**
+ * Convert any value into a `Node` using the current schema, recursively
+ * turning objects into collections.
+ */
+ createNode(value: unknown, options?: CreateNodeOptions): Node;
+ createNode(value: unknown, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions): Node;
+ /**
+ * Convert a key and a value into a `Pair` using the current schema,
+ * recursively wrapping all values as `Scalar` or `Collection` nodes.
+ */
+ createPair<K extends Node = Node, V extends Node = Node>(key: unknown, value: unknown, options?: CreateNodeOptions): Pair<K, V>;
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key: any): boolean;
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path: Iterable<unknown>): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Returns item at `path`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path: Iterable<unknown>, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the document includes a value with the key `key`.
+ */
+ has(key: unknown): boolean;
+ /**
+ * Checks if the document includes a value at `path`.
+ */
+ hasIn(path: Iterable<unknown>): boolean;
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ set(key: any, value: unknown): void;
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Change the YAML version and schema used by the document.
+ * A `null` version disables support for directives, explicit tags, anchors, and aliases.
+ * It also requires the `schema` option to be given as a `Schema` instance value.
+ *
+ * Overrides all previously set schema options.
+ */
+ setSchema(version: '1.1' | '1.2' | 'next' | null, options?: SchemaOptions): void;
+ /** A plain JavaScript representation of the document `contents`. */
+ toJS(opt?: ToJSOptions & {
+ [ignored: string]: unknown;
+ }): any;
+ /**
+ * A JSON representation of the document `contents`.
+ *
+ * @param jsonArg Used by `JSON.stringify` to indicate the array index or
+ * property name.
+ */
+ toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any;
+ /** A YAML representation of the document. */
+ toString(options?: ToStringOptions): string;
+}
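
The declaration above is the full public surface of Document. A minimal usage sketch (illustrative keys and values, assuming the package is imported as `yaml`; not taken from the diff):

    // Build a document programmatically and emit it as YAML.
    const { Document } = require('yaml');

    const doc = new Document({ name: 'demo', version: 1 });
    doc.set('enabled', true);                        // wrapped via the document schema
    doc.setIn(['build', 'targets'], ['linux', 'darwin']);
    console.log(String(doc));                        // toString(): the YAML text
    console.log(doc.toJS());                         // plain JS copy of `contents`
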
diff --git a/node_modules/yaml/dist/doc/Document.js b/node_modules/yaml/dist/doc/Document.js
new file mode 100644
index 0000000..d38d080
--- /dev/null
+++ b/node_modules/yaml/dist/doc/Document.js
@@ -0,0 +1,334 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var Collection = require('../nodes/Collection.js');
+var Node = require('../nodes/Node.js');
+var Pair = require('../nodes/Pair.js');
+var toJS = require('../nodes/toJS.js');
+var Schema = require('../schema/Schema.js');
+var stringify = require('../stringify/stringify.js');
+var stringifyDocument = require('../stringify/stringifyDocument.js');
+var anchors = require('./anchors.js');
+var applyReviver = require('./applyReviver.js');
+var createNode = require('./createNode.js');
+var directives = require('./directives.js');
+
+class Document {
+ constructor(value, replacer, options) {
+ /** A comment before this Document */
+ this.commentBefore = null;
+ /** A comment immediately after this Document */
+ this.comment = null;
+ /** Errors encountered during parsing. */
+ this.errors = [];
+ /** Warnings encountered during parsing. */
+ this.warnings = [];
+ Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const opt = Object.assign({
+ intAsBigInt: false,
+ keepSourceTokens: false,
+ logLevel: 'warn',
+ prettyErrors: true,
+ strict: true,
+ uniqueKeys: true,
+ version: '1.2'
+ }, options);
+ this.options = opt;
+ let { version } = opt;
+ if (options === null || options === void 0 ? void 0 : options.directives) {
+ this.directives = options.directives.atDocument();
+ if (this.directives.yaml.explicit)
+ version = this.directives.yaml.version;
+ }
+ else
+ this.directives = new directives.Directives({ version });
+ this.setSchema(version, options);
+ if (value === undefined)
+ this.contents = null;
+ else {
+ this.contents = this.createNode(value, _replacer, options);
+ }
+ }
+ /**
+ * Create a deep copy of this Document and its contents.
+ *
+ * Custom Node values that inherit from `Object` still refer to their original instances.
+ */
+ clone() {
+ const copy = Object.create(Document.prototype, {
+ [Node.NODE_TYPE]: { value: Node.DOC }
+ });
+ copy.commentBefore = this.commentBefore;
+ copy.comment = this.comment;
+ copy.errors = this.errors.slice();
+ copy.warnings = this.warnings.slice();
+ copy.options = Object.assign({}, this.options);
+ if (this.directives)
+ copy.directives = this.directives.clone();
+ copy.schema = this.schema.clone();
+ copy.contents = Node.isNode(this.contents)
+ ? this.contents.clone(copy.schema)
+ : this.contents;
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /** Adds a value to the document. */
+ add(value) {
+ if (assertCollection(this.contents))
+ this.contents.add(value);
+ }
+ /** Adds a value to the document at `path`. */
+ addIn(path, value) {
+ if (assertCollection(this.contents))
+ this.contents.addIn(path, value);
+ }
+ /**
+ * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
+ *
+ * If `node` already has an anchor, `name` is ignored.
+ * Otherwise, the `node.anchor` value will be set to `name`,
+ * or if an anchor with that name is already present in the document,
+ * `name` will be used as a prefix for a new unique anchor.
+ * If `name` is undefined, the generated anchor will use 'a' as a prefix.
+ */
+ createAlias(node, name) {
+ if (!node.anchor) {
+ const prev = anchors.anchorNames(this);
+ node.anchor =
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;
+ }
+ return new Alias.Alias(node.anchor);
+ }
+ createNode(value, replacer, options) {
+ let _replacer = undefined;
+ if (typeof replacer === 'function') {
+ value = replacer.call({ '': value }, '', value);
+ _replacer = replacer;
+ }
+ else if (Array.isArray(replacer)) {
+ const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
+ const asStr = replacer.filter(keyToStr).map(String);
+ if (asStr.length > 0)
+ replacer = replacer.concat(asStr);
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options !== null && options !== void 0 ? options : {};
+ const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this,
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ anchorPrefix || 'a');
+ const ctx = {
+ aliasDuplicateObjects: aliasDuplicateObjects !== null && aliasDuplicateObjects !== void 0 ? aliasDuplicateObjects : true,
+ keepUndefined: keepUndefined !== null && keepUndefined !== void 0 ? keepUndefined : false,
+ onAnchor,
+ onTagObj,
+ replacer: _replacer,
+ schema: this.schema,
+ sourceObjects
+ };
+ const node = createNode.createNode(value, tag, ctx);
+ if (flow && Node.isCollection(node))
+ node.flow = true;
+ setAnchors();
+ return node;
+ }
+ /**
+ * Convert a key and a value into a `Pair` using the current schema,
+ * recursively wrapping all values as `Scalar` or `Collection` nodes.
+ */
+ createPair(key, value, options = {}) {
+ const k = this.createNode(key, null, options);
+ const v = this.createNode(value, null, options);
+ return new Pair.Pair(k, v);
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ return assertCollection(this.contents) ? this.contents.delete(key) : false;
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ if (Collection.isEmptyPath(path)) {
+ if (this.contents == null)
+ return false;
+ this.contents = null;
+ return true;
+ }
+ return assertCollection(this.contents)
+ ? this.contents.deleteIn(path)
+ : false;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ get(key, keepScalar) {
+ return Node.isCollection(this.contents)
+ ? this.contents.get(key, keepScalar)
+ : undefined;
+ }
+ /**
+ * Returns item at `path`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ if (Collection.isEmptyPath(path))
+ return !keepScalar && Node.isScalar(this.contents)
+ ? this.contents.value
+ : this.contents;
+ return Node.isCollection(this.contents)
+ ? this.contents.getIn(path, keepScalar)
+ : undefined;
+ }
+ /**
+ * Checks if the document includes a value with the key `key`.
+ */
+ has(key) {
+ return Node.isCollection(this.contents) ? this.contents.has(key) : false;
+ }
+ /**
+ * Checks if the document includes a value at `path`.
+ */
+ hasIn(path) {
+ if (Collection.isEmptyPath(path))
+ return this.contents !== undefined;
+ return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ set(key, value) {
+ if (this.contents == null) {
+ this.contents = Collection.collectionFromPath(this.schema, [key], value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.set(key, value);
+ }
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ if (Collection.isEmptyPath(path))
+ this.contents = value;
+ else if (this.contents == null) {
+ this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.setIn(path, value);
+ }
+ }
+ /**
+ * Change the YAML version and schema used by the document.
+ * A `null` version disables support for directives, explicit tags, anchors, and aliases.
+ * It also requires the `schema` option to be given as a `Schema` instance value.
+ *
+ * Overrides all previously set schema options.
+ */
+ setSchema(version, options = {}) {
+ if (typeof version === 'number')
+ version = String(version);
+ let opt;
+ switch (version) {
+ case '1.1':
+ if (this.directives)
+ this.directives.yaml.version = '1.1';
+ else
+ this.directives = new directives.Directives({ version: '1.1' });
+ opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
+ break;
+ case '1.2':
+ case 'next':
+ if (this.directives)
+ this.directives.yaml.version = version;
+ else
+ this.directives = new directives.Directives({ version });
+ opt = { merge: false, resolveKnownTags: true, schema: 'core' };
+ break;
+ case null:
+ if (this.directives)
+ delete this.directives;
+ opt = null;
+ break;
+ default: {
+ const sv = JSON.stringify(version);
+ throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
+ }
+ }
+ // Not using `instanceof Schema` to allow for duck typing
+ if (options.schema instanceof Object)
+ this.schema = options.schema;
+ else if (opt)
+ this.schema = new Schema.Schema(Object.assign(opt, options));
+ else
+ throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
+ }
+ // json & jsonArg are only used from toJSON()
+ toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
+ const ctx = {
+ anchors: new Map(),
+ doc: this,
+ keep: !json,
+ mapAsMap: mapAsMap === true,
+ mapKeyWarned: false,
+ maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
+ stringify: stringify.stringify
+ };
+ const res = toJS.toJS(this.contents, jsonArg !== null && jsonArg !== void 0 ? jsonArg : '', ctx);
+ if (typeof onAnchor === 'function')
+ for (const { count, res } of ctx.anchors.values())
+ onAnchor(res, count);
+ return typeof reviver === 'function'
+ ? applyReviver.applyReviver(reviver, { '': res }, '', res)
+ : res;
+ }
+ /**
+ * A JSON representation of the document `contents`.
+ *
+ * @param jsonArg Used by `JSON.stringify` to indicate the array index or
+ * property name.
+ */
+ toJSON(jsonArg, onAnchor) {
+ return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
+ }
+ /** A YAML representation of the document. */
+ toString(options = {}) {
+ if (this.errors.length > 0)
+ throw new Error('Document with errors cannot be stringified');
+ if ('indent' in options &&
+ (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
+ const s = JSON.stringify(options.indent);
+ throw new Error(`"indent" option must be a positive integer, not ${s}`);
+ }
+ return stringifyDocument.stringifyDocument(this, options);
+ }
+}
+function assertCollection(contents) {
+ if (Node.isCollection(contents))
+ return true;
+ throw new Error('Expected a YAML collection as document contents');
+}
+
+exports.Document = Document;
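
A sketch of what the setSchema() branches above mean in practice (illustrative; assumes the exported Schema constructor accepts the same options object the Document uses internally):

    const { Document, Schema } = require('yaml');

    const doc = new Document({ a: 1 });
    doc.setSchema('1.1');   // YAML 1.1: merge keys enabled, known-tag resolution off
    doc.setSchema('1.2');   // back to the core schema defaults
    // With a null version, directives/anchors are disabled and an explicit
    // Schema instance (or compatible object) must be supplied:
    doc.setSchema(null, { schema: new Schema({ schema: 'core' }) });
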
diff --git a/node_modules/yaml/dist/doc/anchors.d.ts b/node_modules/yaml/dist/doc/anchors.d.ts
new file mode 100644
index 0000000..d321574
--- /dev/null
+++ b/node_modules/yaml/dist/doc/anchors.d.ts
@@ -0,0 +1,24 @@
+import { Node } from '../nodes/Node.js';
+import type { Document } from './Document.js';
+/**
+ * Verify that the input string is a valid anchor.
+ *
+ * Will throw on errors.
+ */
+export declare function anchorIsValid(anchor: string): true;
+export declare function anchorNames(root: Document | Node): Set<string>;
+/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
+export declare function findNewAnchor(prefix: string, exclude: Set<string>): string;
+export declare function createNodeAnchors(doc: Document, prefix: string): {
+ onAnchor: (source: unknown) => string;
+ /**
+ * With circular references, the source node is only resolved after all
+ * of its child nodes are. This is why anchors are set only after all of
+ * the nodes have been created.
+ */
+ setAnchors: () => void;
+ sourceObjects: Map<unknown, {
+ anchor: string | null;
+ node: Node | null;
+ }>;
+};
diff --git a/node_modules/yaml/dist/doc/anchors.js b/node_modules/yaml/dist/doc/anchors.js
new file mode 100644
index 0000000..5c51bdb
--- /dev/null
+++ b/node_modules/yaml/dist/doc/anchors.js
@@ -0,0 +1,77 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var visit = require('../visit.js');
+
+/**
+ * Verify that the input string is a valid anchor.
+ *
+ * Will throw on errors.
+ */
+function anchorIsValid(anchor) {
+ if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
+ const sa = JSON.stringify(anchor);
+ const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
+ throw new Error(msg);
+ }
+ return true;
+}
+function anchorNames(root) {
+ const anchors = new Set();
+ visit.visit(root, {
+ Value(_key, node) {
+ if (node.anchor)
+ anchors.add(node.anchor);
+ }
+ });
+ return anchors;
+}
+/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
+function findNewAnchor(prefix, exclude) {
+ for (let i = 1; true; ++i) {
+ const name = `${prefix}${i}`;
+ if (!exclude.has(name))
+ return name;
+ }
+}
+function createNodeAnchors(doc, prefix) {
+ const aliasObjects = [];
+ const sourceObjects = new Map();
+ let prevAnchors = null;
+ return {
+ onAnchor: (source) => {
+ aliasObjects.push(source);
+ if (!prevAnchors)
+ prevAnchors = anchorNames(doc);
+ const anchor = findNewAnchor(prefix, prevAnchors);
+ prevAnchors.add(anchor);
+ return anchor;
+ },
+ /**
+ * With circular references, the source node is only resolved after all
+ * of its child nodes are. This is why anchors are set only after all of
+ * the nodes have been created.
+ */
+ setAnchors: () => {
+ for (const source of aliasObjects) {
+ const ref = sourceObjects.get(source);
+ if (typeof ref === 'object' &&
+ ref.anchor &&
+ (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {
+ ref.node.anchor = ref.anchor;
+ }
+ else {
+ const error = new Error('Failed to resolve repeated object (this should not happen)');
+ error.source = source;
+ throw error;
+ }
+ }
+ },
+ sourceObjects
+ };
+}
+
+exports.anchorIsValid = anchorIsValid;
+exports.anchorNames = anchorNames;
+exports.createNodeAnchors = createNodeAnchors;
+exports.findNewAnchor = findNewAnchor;
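
The helpers above back Document.createAlias(); a sketch of the automatic anchor naming (prefix 'a' plus a one-indexed suffix; illustrative document, output shown approximately):

    const { parseDocument } = require('yaml');

    const doc = parseDocument('first: { x: 1 }\nsecond: { x: 2 }\n');
    const target = doc.get('first', true);   // keep the YAMLMap node itself
    const alias = doc.createAlias(target);   // no name given -> 'a1', 'a2', ...
    doc.set('copy', alias);
    console.log(String(doc));
    // first: &a1 { x: 1 }
    // second: { x: 2 }
    // copy: *a1
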
diff --git a/node_modules/yaml/dist/doc/applyReviver.d.ts b/node_modules/yaml/dist/doc/applyReviver.d.ts
new file mode 100644
index 0000000..ac28f2c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/applyReviver.d.ts
@@ -0,0 +1,9 @@
+export declare type Reviver = (key: unknown, value: unknown) => unknown;
+/**
+ * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
+ * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
+ * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
+ *
+ * Includes extensions for handling Map and Set objects.
+ */
+export declare function applyReviver(reviver: Reviver, obj: unknown, key: unknown, val: any): unknown;
diff --git a/node_modules/yaml/dist/doc/applyReviver.js b/node_modules/yaml/dist/doc/applyReviver.js
new file mode 100644
index 0000000..8734579
--- /dev/null
+++ b/node_modules/yaml/dist/doc/applyReviver.js
@@ -0,0 +1,56 @@
+'use strict';
+
+/**
+ * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
+ * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
+ * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
+ *
+ * Includes extensions for handling Map and Set objects.
+ */
+function applyReviver(reviver, obj, key, val) {
+ if (val && typeof val === 'object') {
+ if (Array.isArray(val)) {
+ for (let i = 0, len = val.length; i < len; ++i) {
+ const v0 = val[i];
+ const v1 = applyReviver(reviver, val, String(i), v0);
+ if (v1 === undefined)
+ delete val[i];
+ else if (v1 !== v0)
+ val[i] = v1;
+ }
+ }
+ else if (val instanceof Map) {
+ for (const k of Array.from(val.keys())) {
+ const v0 = val.get(k);
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ val.delete(k);
+ else if (v1 !== v0)
+ val.set(k, v1);
+ }
+ }
+ else if (val instanceof Set) {
+ for (const v0 of Array.from(val)) {
+ const v1 = applyReviver(reviver, val, v0, v0);
+ if (v1 === undefined)
+ val.delete(v0);
+ else if (v1 !== v0) {
+ val.delete(v0);
+ val.add(v1);
+ }
+ }
+ }
+ else {
+ for (const [k, v0] of Object.entries(val)) {
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ delete val[k];
+ else if (v1 !== v0)
+ val[k] = v1;
+ }
+ }
+ }
+ return reviver.call(obj, key, val);
+}
+
+exports.applyReviver = applyReviver;
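
applyReviver() is reachable through toJS({ reviver }) and the reviver argument of parse(); it follows the JSON.parse contract (return undefined to drop an entry, or a new value to replace it). An illustrative sketch:

    const { parseDocument } = require('yaml');

    const doc = parseDocument('count: "42"\nname: demo\n');
    const data = doc.toJS({
      reviver: (key, value) => (key === 'count' ? Number(value) : value)
    });
    console.log(data); // { count: 42, name: 'demo' }
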
diff --git a/node_modules/yaml/dist/doc/createNode.d.ts b/node_modules/yaml/dist/doc/createNode.d.ts
new file mode 100644
index 0000000..cfbd35c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/createNode.d.ts
@@ -0,0 +1,17 @@
+import { Node } from '../nodes/Node.js';
+import type { Schema } from '../schema/Schema.js';
+import type { CollectionTag, ScalarTag } from '../schema/types.js';
+import type { Replacer } from './Document.js';
+export interface CreateNodeContext {
+ aliasDuplicateObjects: boolean;
+ keepUndefined: boolean;
+ onAnchor: (source: unknown) => string;
+ onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
+ sourceObjects: Map<unknown, {
+ anchor: string | null;
+ node: Node | null;
+ }>;
+ replacer?: Replacer;
+ schema: Schema;
+}
+export declare function createNode(value: unknown, tagName: string | undefined, ctx: CreateNodeContext): Node;
diff --git a/node_modules/yaml/dist/doc/createNode.js b/node_modules/yaml/dist/doc/createNode.js
new file mode 100644
index 0000000..801a611
--- /dev/null
+++ b/node_modules/yaml/dist/doc/createNode.js
@@ -0,0 +1,89 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+
+const defaultTagPrefix = 'tag:yaml.org,2002:';
+function findTagObject(value, tagName, tags) {
+ var _a;
+ if (tagName) {
+ const match = tags.filter(t => t.tag === tagName);
+ const tagObj = (_a = match.find(t => !t.format)) !== null && _a !== void 0 ? _a : match[0];
+ if (!tagObj)
+ throw new Error(`Tag ${tagName} not found`);
+ return tagObj;
+ }
+ return tags.find(t => { var _a; return ((_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, value)) && !t.format; });
+}
+function createNode(value, tagName, ctx) {
+ var _a, _b;
+ if (Node.isDocument(value))
+ value = value.contents;
+ if (Node.isNode(value))
+ return value;
+ if (Node.isPair(value)) {
+ const map = (_b = (_a = ctx.schema[Node.MAP]).createNode) === null || _b === void 0 ? void 0 : _b.call(_a, ctx.schema, null, ctx);
+ map.items.push(value);
+ return map;
+ }
+ if (value instanceof String ||
+ value instanceof Number ||
+ value instanceof Boolean ||
+ (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
+ ) {
+ // https://tc39.es/ecma262/#sec-serializejsonproperty
+ value = value.valueOf();
+ }
+ const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
+ // Detect duplicate references to the same object & use Alias nodes for all
+ // after first. The `ref` wrapper allows for circular references to resolve.
+ let ref = undefined;
+ if (aliasDuplicateObjects && value && typeof value === 'object') {
+ ref = sourceObjects.get(value);
+ if (ref) {
+ if (!ref.anchor)
+ ref.anchor = onAnchor(value);
+ return new Alias.Alias(ref.anchor);
+ }
+ else {
+ ref = { anchor: null, node: null };
+ sourceObjects.set(value, ref);
+ }
+ }
+ if (tagName === null || tagName === void 0 ? void 0 : tagName.startsWith('!!'))
+ tagName = defaultTagPrefix + tagName.slice(2);
+ let tagObj = findTagObject(value, tagName, schema.tags);
+ if (!tagObj) {
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ value = value.toJSON();
+ }
+ if (!value || typeof value !== 'object') {
+ const node = new Scalar.Scalar(value);
+ if (ref)
+ ref.node = node;
+ return node;
+ }
+ tagObj =
+ value instanceof Map
+ ? schema[Node.MAP]
+ : Symbol.iterator in Object(value)
+ ? schema[Node.SEQ]
+ : schema[Node.MAP];
+ }
+ if (onTagObj) {
+ onTagObj(tagObj);
+ delete ctx.onTagObj;
+ }
+ const node = (tagObj === null || tagObj === void 0 ? void 0 : tagObj.createNode)
+ ? tagObj.createNode(ctx.schema, value, ctx)
+ : new Scalar.Scalar(value);
+ if (tagName)
+ node.tag = tagName;
+ if (ref)
+ ref.node = node;
+ return node;
+}
+
+exports.createNode = createNode;
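
createNode() is what turns plain JS values into nodes; a sketch of the collection/flow handling and the duplicate-object aliasing implemented above (illustrative values, output shown approximately):

    const { Document, isMap } = require('yaml');

    const doc = new Document();
    const node = doc.createNode({ id: 1, tags: ['a', 'b'] }, { flow: true });
    console.log(isMap(node));     // true - plain objects become YAMLMap nodes
    doc.contents = node;
    console.log(String(doc));     // { id: 1, tags: [ a, b ] }

    // With aliasDuplicateObjects (the default), a repeated object reference
    // becomes an anchor + alias pair instead of being serialized twice.
    const shared = { x: 1 };
    doc.contents = doc.createNode({ left: shared, right: shared });
    console.log(String(doc));     // left: &a1 ...  right: *a1  (anchor chosen automatically)
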
diff --git a/node_modules/yaml/dist/doc/directives.d.ts b/node_modules/yaml/dist/doc/directives.d.ts
new file mode 100644
index 0000000..ff8a2cb
--- /dev/null
+++ b/node_modules/yaml/dist/doc/directives.d.ts
@@ -0,0 +1,49 @@
+import type { Document } from './Document.js';
+export declare class Directives {
+ static defaultYaml: Directives['yaml'];
+ static defaultTags: Directives['tags'];
+ yaml: {
+ version: '1.1' | '1.2' | 'next';
+ explicit?: boolean;
+ };
+ tags: Record<string, string>;
+ /**
+ * The directives-end/doc-start marker `---`. If `null`, a marker may still be
+ * included in the document's stringified representation.
+ */
+ docStart: true | null;
+ /** The doc-end marker `...`. */
+ docEnd: boolean;
+ /**
+ * Used when parsing YAML 1.1, where:
+ * > If the document specifies no directives, it is parsed using the same
+ * > settings as the previous document. If the document does specify any
+ * > directives, all directives of previous documents, if any, are ignored.
+ */
+ private atNextDocument?;
+ constructor(yaml?: Directives['yaml'], tags?: Directives['tags']);
+ clone(): Directives;
+ /**
+ * During parsing, get a Directives instance for the current document and
+ * update the stream state according to the current version's spec.
+ */
+ atDocument(): Directives;
+ /**
+ * @param onError - May be called even if the action was successful
+ * @returns `true` on success
+ */
+ add(line: string, onError: (offset: number, message: string, warning?: boolean) => void): boolean;
+ /**
+ * Resolves a tag, matching handles to those defined in %TAG directives.
+ *
+ * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
+ * `'!local'` tag, or `null` if unresolvable.
+ */
+ tagName(source: string, onError: (message: string) => void): string | null;
+ /**
+ * Given a fully resolved tag, returns its printable string form,
+ * taking into account current tag prefixes and defaults.
+ */
+ tagString(tag: string): string;
+ toString(doc?: Document): string;
+}
diff --git a/node_modules/yaml/dist/doc/directives.js b/node_modules/yaml/dist/doc/directives.js
new file mode 100644
index 0000000..0fa911c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/directives.js
@@ -0,0 +1,171 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var visit = require('../visit.js');
+
+const escapeChars = {
+ '!': '%21',
+ ',': '%2C',
+ '[': '%5B',
+ ']': '%5D',
+ '{': '%7B',
+ '}': '%7D'
+};
+const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
+class Directives {
+ constructor(yaml, tags) {
+ /**
+ * The directives-end/doc-start marker `---`. If `null`, a marker may still be
+ * included in the document's stringified representation.
+ */
+ this.docStart = null;
+ /** The doc-end marker `...`. */
+ this.docEnd = false;
+ this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
+ this.tags = Object.assign({}, Directives.defaultTags, tags);
+ }
+ clone() {
+ const copy = new Directives(this.yaml, this.tags);
+ copy.docStart = this.docStart;
+ return copy;
+ }
+ /**
+ * During parsing, get a Directives instance for the current document and
+ * update the stream state according to the current version's spec.
+ */
+ atDocument() {
+ const res = new Directives(this.yaml, this.tags);
+ switch (this.yaml.version) {
+ case '1.1':
+ this.atNextDocument = true;
+ break;
+ case '1.2':
+ this.atNextDocument = false;
+ this.yaml = {
+ explicit: Directives.defaultYaml.explicit,
+ version: '1.2'
+ };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ break;
+ }
+ return res;
+ }
+ /**
+ * @param onError - May be called even if the action was successful
+ * @returns `true` on success
+ */
+ add(line, onError) {
+ if (this.atNextDocument) {
+ this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ this.atNextDocument = false;
+ }
+ const parts = line.trim().split(/[ \t]+/);
+ const name = parts.shift();
+ switch (name) {
+ case '%TAG': {
+ if (parts.length !== 2) {
+ onError(0, '%TAG directive should contain exactly two parts');
+ if (parts.length < 2)
+ return false;
+ }
+ const [handle, prefix] = parts;
+ this.tags[handle] = prefix;
+ return true;
+ }
+ case '%YAML': {
+ this.yaml.explicit = true;
+ if (parts.length !== 1) {
+ onError(0, '%YAML directive should contain exactly one part');
+ return false;
+ }
+ const [version] = parts;
+ if (version === '1.1' || version === '1.2') {
+ this.yaml.version = version;
+ return true;
+ }
+ else {
+ const isValid = /^\d+\.\d+$/.test(version);
+ onError(6, `Unsupported YAML version ${version}`, isValid);
+ return false;
+ }
+ }
+ default:
+ onError(0, `Unknown directive ${name}`, true);
+ return false;
+ }
+ }
+ /**
+ * Resolves a tag, matching handles to those defined in %TAG directives.
+ *
+ * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
+ * `'!local'` tag, or `null` if unresolvable.
+ */
+ tagName(source, onError) {
+ if (source === '!')
+ return '!'; // non-specific tag
+ if (source[0] !== '!') {
+ onError(`Not a valid tag: ${source}`);
+ return null;
+ }
+ if (source[1] === '<') {
+ const verbatim = source.slice(2, -1);
+ if (verbatim === '!' || verbatim === '!!') {
+ onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
+ return null;
+ }
+ if (source[source.length - 1] !== '>')
+ onError('Verbatim tags must end with a >');
+ return verbatim;
+ }
+ const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
+ if (!suffix)
+ onError(`The ${source} tag has no suffix`);
+ const prefix = this.tags[handle];
+ if (prefix)
+ return prefix + decodeURIComponent(suffix);
+ if (handle === '!')
+ return source; // local tag
+ onError(`Could not resolve tag: ${source}`);
+ return null;
+ }
+ /**
+ * Given a fully resolved tag, returns its printable string form,
+ * taking into account current tag prefixes and defaults.
+ */
+ tagString(tag) {
+ for (const [handle, prefix] of Object.entries(this.tags)) {
+ if (tag.startsWith(prefix))
+ return handle + escapeTagName(tag.substring(prefix.length));
+ }
+ return tag[0] === '!' ? tag : `!<${tag}>`;
+ }
+ toString(doc) {
+ const lines = this.yaml.explicit
+ ? [`%YAML ${this.yaml.version || '1.2'}`]
+ : [];
+ const tagEntries = Object.entries(this.tags);
+ let tagNames;
+ if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {
+ const tags = {};
+ visit.visit(doc.contents, (_key, node) => {
+ if (Node.isNode(node) && node.tag)
+ tags[node.tag] = true;
+ });
+ tagNames = Object.keys(tags);
+ }
+ else
+ tagNames = [];
+ for (const [handle, prefix] of tagEntries) {
+ if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
+ continue;
+ if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
+ lines.push(`%TAG ${handle} ${prefix}`);
+ }
+ return lines.join('\n');
+ }
+}
+Directives.defaultYaml = { explicit: false, version: '1.2' };
+Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
+
+exports.Directives = Directives;
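
A sketch of the %TAG handling implemented above, seen through a parsed document (illustrative source and tag prefix; output shown approximately):

    const { parseDocument } = require('yaml');

    const src = '%TAG !e! tag:example.com,2019:\n---\n!e!point { x: 1, y: 2 }\n';
    const doc = parseDocument(src);
    console.log(doc.directives.tags['!e!']);  // 'tag:example.com,2019:'
    console.log(doc.contents.tag);            // resolved: 'tag:example.com,2019:point'
    console.log(String(doc));                 // re-emits the %TAG line while the prefix is in use
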
diff --git a/node_modules/yaml/dist/errors.d.ts b/node_modules/yaml/dist/errors.d.ts
new file mode 100644
index 0000000..1ea1797
--- /dev/null
+++ b/node_modules/yaml/dist/errors.d.ts
@@ -0,0 +1,21 @@
+import type { LineCounter } from './parse/line-counter';
+export declare type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN';
+export declare type LinePos = {
+ line: number;
+ col: number;
+};
+export declare class YAMLError extends Error {
+ name: 'YAMLParseError' | 'YAMLWarning';
+ code: ErrorCode;
+ message: string;
+ pos: [number, number];
+ linePos?: [LinePos] | [LinePos, LinePos];
+ constructor(name: YAMLError['name'], pos: [number, number], code: ErrorCode, message: string);
+}
+export declare class YAMLParseError extends YAMLError {
+ constructor(pos: [number, number], code: ErrorCode, message: string);
+}
+export declare class YAMLWarning extends YAMLError {
+ constructor(pos: [number, number], code: ErrorCode, message: string);
+}
+export declare const prettifyError: (src: string, lc: LineCounter) => (error: YAMLError) => void;
diff --git a/node_modules/yaml/dist/errors.js b/node_modules/yaml/dist/errors.js
new file mode 100644
index 0000000..39f49fa
--- /dev/null
+++ b/node_modules/yaml/dist/errors.js
@@ -0,0 +1,62 @@
+'use strict';
+
+class YAMLError extends Error {
+ constructor(name, pos, code, message) {
+ super();
+ this.name = name;
+ this.code = code;
+ this.message = message;
+ this.pos = pos;
+ }
+}
+class YAMLParseError extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLParseError', pos, code, message);
+ }
+}
+class YAMLWarning extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLWarning', pos, code, message);
+ }
+}
+const prettifyError = (src, lc) => (error) => {
+ if (error.pos[0] === -1)
+ return;
+ error.linePos = error.pos.map(pos => lc.linePos(pos));
+ const { line, col } = error.linePos[0];
+ error.message += ` at line ${line}, column ${col}`;
+ let ci = col - 1;
+ let lineStr = src
+ .substring(lc.lineStarts[line - 1], lc.lineStarts[line])
+ .replace(/[\n\r]+$/, '');
+ // Trim to max 80 chars, keeping col position near the middle
+ if (ci >= 60 && lineStr.length > 80) {
+ const trimStart = Math.min(ci - 39, lineStr.length - 79);
+ lineStr = '…' + lineStr.substring(trimStart);
+ ci -= trimStart - 1;
+ }
+ if (lineStr.length > 80)
+ lineStr = lineStr.substring(0, 79) + '…';
+ // Include previous line in context if pointing at line start
+ if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
+ // Regexp won't match if start is trimmed
+ let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
+ if (prev.length > 80)
+ prev = prev.substring(0, 79) + '…\n';
+ lineStr = prev + lineStr;
+ }
+ if (/[^ ]/.test(lineStr)) {
+ let count = 1;
+ const end = error.linePos[1];
+ if (end && end.line === line && end.col > col) {
+ count = Math.min(end.col - col, 80 - ci);
+ }
+ const pointer = ' '.repeat(ci) + '^'.repeat(count);
+ error.message += `:\n\n${lineStr}\n${pointer}\n`;
+ }
+};
+
+exports.YAMLError = YAMLError;
+exports.YAMLParseError = YAMLParseError;
+exports.YAMLWarning = YAMLWarning;
+exports.prettifyError = prettifyError;
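
Parse errors and warnings are collected on the document rather than thrown; with prettyErrors (the default) and a LineCounter, prettifyError() above fills in line/column info. Illustrative sketch:

    const { parseDocument, LineCounter } = require('yaml');

    const lineCounter = new LineCounter();
    const doc = parseDocument('a: 1\nb: [ 2, 3\nc: 4\n', { lineCounter });
    for (const err of doc.errors) {
      // err.name, err.code and err.pos are always set; linePos comes from the line counter.
      console.log(err.code, err.linePos && err.linePos[0]);
    }
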
diff --git a/node_modules/yaml/dist/index.d.ts b/node_modules/yaml/dist/index.d.ts
new file mode 100644
index 0000000..de561ca
--- /dev/null
+++ b/node_modules/yaml/dist/index.d.ts
@@ -0,0 +1,19 @@
+export { Composer } from './compose/composer.js';
+export { Document } from './doc/Document.js';
+export { Schema } from './schema/Schema.js';
+export { ErrorCode, YAMLError, YAMLParseError, YAMLWarning } from './errors.js';
+export { Alias } from './nodes/Alias.js';
+export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq, Node, ParsedNode, Range } from './nodes/Node.js';
+export { Pair } from './nodes/Pair.js';
+export { Scalar } from './nodes/Scalar.js';
+export { YAMLMap } from './nodes/YAMLMap.js';
+export { YAMLSeq } from './nodes/YAMLSeq.js';
+export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js';
+export * as CST from './parse/cst.js';
+export { Lexer } from './parse/lexer.js';
+export { LineCounter } from './parse/line-counter.js';
+export { Parser } from './parse/parser.js';
+export { EmptyStream, parse, parseAllDocuments, parseDocument, stringify } from './public-api.js';
+export type { TagId, Tags } from './schema/tags';
+export type { CollectionTag, ScalarTag } from './schema/types';
+export { asyncVisitor, asyncVisitorFn, visit, visitAsync, visitor, visitorFn } from './visit.js';
diff --git a/node_modules/yaml/dist/index.js b/node_modules/yaml/dist/index.js
new file mode 100644
index 0000000..b013fae
--- /dev/null
+++ b/node_modules/yaml/dist/index.js
@@ -0,0 +1,50 @@
+'use strict';
+
+var composer = require('./compose/composer.js');
+var Document = require('./doc/Document.js');
+var Schema = require('./schema/Schema.js');
+var errors = require('./errors.js');
+var Alias = require('./nodes/Alias.js');
+var Node = require('./nodes/Node.js');
+var Pair = require('./nodes/Pair.js');
+var Scalar = require('./nodes/Scalar.js');
+var YAMLMap = require('./nodes/YAMLMap.js');
+var YAMLSeq = require('./nodes/YAMLSeq.js');
+var cst = require('./parse/cst.js');
+var lexer = require('./parse/lexer.js');
+var lineCounter = require('./parse/line-counter.js');
+var parser = require('./parse/parser.js');
+var publicApi = require('./public-api.js');
+var visit = require('./visit.js');
+
+
+
+exports.Composer = composer.Composer;
+exports.Document = Document.Document;
+exports.Schema = Schema.Schema;
+exports.YAMLError = errors.YAMLError;
+exports.YAMLParseError = errors.YAMLParseError;
+exports.YAMLWarning = errors.YAMLWarning;
+exports.Alias = Alias.Alias;
+exports.isAlias = Node.isAlias;
+exports.isCollection = Node.isCollection;
+exports.isDocument = Node.isDocument;
+exports.isMap = Node.isMap;
+exports.isNode = Node.isNode;
+exports.isPair = Node.isPair;
+exports.isScalar = Node.isScalar;
+exports.isSeq = Node.isSeq;
+exports.Pair = Pair.Pair;
+exports.Scalar = Scalar.Scalar;
+exports.YAMLMap = YAMLMap.YAMLMap;
+exports.YAMLSeq = YAMLSeq.YAMLSeq;
+exports.CST = cst;
+exports.Lexer = lexer.Lexer;
+exports.LineCounter = lineCounter.LineCounter;
+exports.Parser = parser.Parser;
+exports.parse = publicApi.parse;
+exports.parseAllDocuments = publicApi.parseAllDocuments;
+exports.parseDocument = publicApi.parseDocument;
+exports.stringify = publicApi.stringify;
+exports.visit = visit.visit;
+exports.visitAsync = visit.visitAsync;
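
The exports above are the whole public API; the usual entry points in a short sketch (illustrative YAML):

    const YAML = require('yaml');

    const text = 'jobs:\n  - name: build\n    run: true\n';
    const data = YAML.parse(text);           // plain JS objects
    console.log(data.jobs[0].name);          // 'build'
    console.log(YAML.stringify(data));       // back to YAML text

    const doc = YAML.parseDocument(text);    // full Document: keeps nodes, comments, errors
    console.log(YAML.isMap(doc.contents));   // true
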
diff --git a/node_modules/yaml/dist/log.d.ts b/node_modules/yaml/dist/log.d.ts
new file mode 100644
index 0000000..d5f4e07
--- /dev/null
+++ b/node_modules/yaml/dist/log.d.ts
@@ -0,0 +1,3 @@
+export declare type LogLevelId = 'silent' | 'error' | 'warn' | 'debug';
+export declare function debug(logLevel: LogLevelId, ...messages: any[]): void;
+export declare function warn(logLevel: LogLevelId, warning: string | Error): void;
diff --git a/node_modules/yaml/dist/log.js b/node_modules/yaml/dist/log.js
new file mode 100644
index 0000000..fac7d5a
--- /dev/null
+++ b/node_modules/yaml/dist/log.js
@@ -0,0 +1,17 @@
+'use strict';
+
+function debug(logLevel, ...messages) {
+ if (logLevel === 'debug')
+ console.log(...messages);
+}
+function warn(logLevel, warning) {
+ if (logLevel === 'debug' || logLevel === 'warn') {
+ if (typeof process !== 'undefined' && process.emitWarning)
+ process.emitWarning(warning);
+ else
+ console.warn(warning);
+ }
+}
+
+exports.debug = debug;
+exports.warn = warn;
diff --git a/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js b/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js
new file mode 100644
index 0000000..0962250
--- /dev/null
+++ b/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js
@@ -0,0 +1,76 @@
+'use strict';
+
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+/* global Reflect, Promise */
+
+var extendStatics = function(d, b) {
+ extendStatics = Object.setPrototypeOf ||
+ ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+ function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+ return extendStatics(d, b);
+};
+
+function __extends(d, b) {
+ if (typeof b !== "function" && b !== null)
+ throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+ extendStatics(d, b);
+ function __() { this.constructor = d; }
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+}
+
+function __generator(thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+}
+
+function __values(o) {
+ var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return { value: o && o[i++], done: !o };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+}
+
+exports.__extends = __extends;
+exports.__generator = __generator;
+exports.__values = __values;
diff --git a/node_modules/yaml/dist/nodes/Alias.d.ts b/node_modules/yaml/dist/nodes/Alias.d.ts
new file mode 100644
index 0000000..cdb0f42
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Alias.d.ts
@@ -0,0 +1,28 @@
+import type { Document } from '../doc/Document.js';
+import type { FlowScalar } from '../parse/cst.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { NodeBase, Range } from './Node.js';
+import type { Scalar } from './Scalar';
+import type { ToJSContext } from './toJS.js';
+import type { YAMLMap } from './YAMLMap.js';
+import type { YAMLSeq } from './YAMLSeq.js';
+export declare namespace Alias {
+ interface Parsed extends Alias {
+ range: Range;
+ srcToken?: FlowScalar & {
+ type: 'alias';
+ };
+ }
+}
+export declare class Alias extends NodeBase {
+ source: string;
+ anchor?: never;
+ constructor(source: string);
+ /**
+ * Resolve the value of this alias within `doc`, finding the last
+ * instance of the `source` anchor before this node.
+ */
+ resolve(doc: Document): Scalar | YAMLMap | YAMLSeq | undefined;
+ toJSON(_arg?: unknown, ctx?: ToJSContext): unknown;
+ toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Alias.js b/node_modules/yaml/dist/nodes/Alias.js
new file mode 100644
index 0000000..75853cf
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Alias.js
@@ -0,0 +1,96 @@
+'use strict';
+
+var anchors = require('../doc/anchors.js');
+var visit = require('../visit.js');
+var Node = require('./Node.js');
+
+class Alias extends Node.NodeBase {
+ constructor(source) {
+ super(Node.ALIAS);
+ this.source = source;
+ Object.defineProperty(this, 'tag', {
+ set() {
+ throw new Error('Alias nodes cannot have tags');
+ }
+ });
+ }
+ /**
+ * Resolve the value of this alias within `doc`, finding the last
+ * instance of the `source` anchor before this node.
+ */
+ resolve(doc) {
+ let found = undefined;
+ visit.visit(doc, {
+ Node: (_key, node) => {
+ if (node === this)
+ return visit.visit.BREAK;
+ if (node.anchor === this.source)
+ found = node;
+ }
+ });
+ return found;
+ }
+ toJSON(_arg, ctx) {
+ if (!ctx)
+ return { source: this.source };
+ const { anchors, doc, maxAliasCount } = ctx;
+ const source = this.resolve(doc);
+ if (!source) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new ReferenceError(msg);
+ }
+ const data = anchors.get(source);
+ /* istanbul ignore if */
+ if (!data || data.res === undefined) {
+ const msg = 'This should not happen: Alias anchor was not resolved?';
+ throw new ReferenceError(msg);
+ }
+ if (maxAliasCount >= 0) {
+ data.count += 1;
+ if (data.aliasCount === 0)
+ data.aliasCount = getAliasCount(doc, source, anchors);
+ if (data.count * data.aliasCount > maxAliasCount) {
+ const msg = 'Excessive alias count indicates a resource exhaustion attack';
+ throw new ReferenceError(msg);
+ }
+ }
+ return data.res;
+ }
+ toString(ctx, _onComment, _onChompKeep) {
+ const src = `*${this.source}`;
+ if (ctx) {
+ anchors.anchorIsValid(this.source);
+ if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new Error(msg);
+ }
+ if (ctx.implicitKey)
+ return `${src} `;
+ }
+ return src;
+ }
+}
+function getAliasCount(doc, node, anchors) {
+ if (Node.isAlias(node)) {
+ const source = node.resolve(doc);
+ const anchor = anchors && source && anchors.get(source);
+ return anchor ? anchor.count * anchor.aliasCount : 0;
+ }
+ else if (Node.isCollection(node)) {
+ let count = 0;
+ for (const item of node.items) {
+ const c = getAliasCount(doc, item, anchors);
+ if (c > count)
+ count = c;
+ }
+ return count;
+ }
+ else if (Node.isPair(node)) {
+ const kc = getAliasCount(doc, node.key, anchors);
+ const vc = getAliasCount(doc, node.value, anchors);
+ return Math.max(kc, vc);
+ }
+ return 1;
+}
+
+exports.Alias = Alias;
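
Alias nodes resolve against the last matching anchor before them; a sketch of resolve() and the maxAliasCount guard used in toJSON() above (illustrative document):

    const { parseDocument } = require('yaml');

    const doc = parseDocument('base: &defaults { retries: 3 }\njob: *defaults\n');
    const alias = doc.get('job', true);              // the Alias node itself
    console.log(alias.source);                       // 'defaults'
    console.log(alias.resolve(doc).get('retries'));  // 3
    console.log(doc.toJS());                         // { base: { retries: 3 }, job: { retries: 3 } }
    // doc.toJS({ maxAliasCount: 10 }) bounds alias expansion (resource-exhaustion guard).
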
diff --git a/node_modules/yaml/dist/nodes/Collection.d.ts b/node_modules/yaml/dist/nodes/Collection.d.ts
new file mode 100644
index 0000000..3515c8f
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Collection.d.ts
@@ -0,0 +1,73 @@
+import type { Schema } from '../schema/Schema.js';
+import { NodeBase, NODE_TYPE } from './Node.js';
+export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import("./Node.js").Node;
+export declare const isEmptyPath: (path: Iterable<unknown> | null | undefined) => boolean;
+export declare abstract class Collection extends NodeBase {
+ static maxFlowStringSingleLineLength: number;
+ schema: Schema | undefined;
+ [NODE_TYPE]: symbol;
+ items: unknown[];
+ /** An optional anchor on this node. Used by alias nodes. */
+ anchor?: string;
+ /**
+ * If true, stringify this and all child nodes using flow rather than
+ * block styles.
+ */
+ flow?: boolean;
+ constructor(type: symbol, schema?: Schema);
+ /**
+ * Create a copy of this collection.
+ *
+ * @param schema - If defined, overwrites the original's schema
+ */
+ clone(schema?: Schema): Collection;
+ /** Adds a value to the collection. */
+ abstract add(value: unknown): void;
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ abstract delete(key: unknown): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ abstract get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ */
+ abstract has(key: unknown): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ abstract set(key: unknown, value: unknown): void;
+ /**
+ * Adds a value to the collection. For `!!map` and `!!omap` the value must
+ * be a Pair instance or a `{ key, value }` object, which may not have a key
+ * that already exists in the map.
+ */
+ addIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path: Iterable<unknown>): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path: Iterable<unknown>, keepScalar?: boolean): unknown;
+ hasAllNullValues(allowScalar?: boolean): boolean;
+ /**
+ * Checks if the collection includes a value at `path`.
+ */
+ hasIn(path: Iterable<unknown>): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path: Iterable<unknown>, value: unknown): void;
+}
diff --git a/node_modules/yaml/dist/nodes/Collection.js b/node_modules/yaml/dist/nodes/Collection.js
new file mode 100644
index 0000000..3c56597
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Collection.js
@@ -0,0 +1,150 @@
+'use strict';
+
+var createNode = require('../doc/createNode.js');
+var Node = require('./Node.js');
+
+function collectionFromPath(schema, path, value) {
+ let v = value;
+ for (let i = path.length - 1; i >= 0; --i) {
+ const k = path[i];
+ if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
+ const a = [];
+ a[k] = v;
+ v = a;
+ }
+ else {
+ v = new Map([[k, v]]);
+ }
+ }
+ return createNode.createNode(v, undefined, {
+ aliasDuplicateObjects: false,
+ keepUndefined: false,
+ onAnchor: () => {
+ throw new Error('This should not happen, please report a bug.');
+ },
+ schema,
+ sourceObjects: new Map()
+ });
+}
+// null, undefined, or an empty non-string iterable (e.g. [])
+const isEmptyPath = (path) => path == null ||
+ (typeof path === 'object' && !!path[Symbol.iterator]().next().done);
+class Collection extends Node.NodeBase {
+ constructor(type, schema) {
+ super(type);
+ Object.defineProperty(this, 'schema', {
+ value: schema,
+ configurable: true,
+ enumerable: false,
+ writable: true
+ });
+ }
+ /**
+ * Create a copy of this collection.
+ *
+ * @param schema - If defined, overwrites the original's schema
+ */
+ clone(schema) {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (schema)
+ copy.schema = schema;
+ copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /**
+ * Adds a value to the collection. For `!!map` and `!!omap` the value must
+ * be a Pair instance or a `{ key, value }` object, which may not have a key
+ * that already exists in the map.
+ */
+ addIn(path, value) {
+ if (isEmptyPath(path))
+ this.add(value);
+ else {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ node.addIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.delete(key);
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ return node.deleteIn(rest);
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (rest.length === 0)
+ return !keepScalar && Node.isScalar(node) ? node.value : node;
+ else
+ return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
+ }
+ hasAllNullValues(allowScalar) {
+ return this.items.every(node => {
+ if (!Node.isPair(node))
+ return false;
+ const n = node.value;
+ return (n == null ||
+ (allowScalar &&
+ Node.isScalar(n) &&
+ n.value == null &&
+ !n.commentBefore &&
+ !n.comment &&
+ !n.tag));
+ });
+ }
+ /**
+ * Checks if the collection includes a value at `path`.
+ */
+ hasIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.has(key);
+ const node = this.get(key, true);
+ return Node.isCollection(node) ? node.hasIn(rest) : false;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ const [key, ...rest] = path;
+ if (rest.length === 0) {
+ this.set(key, value);
+ }
+ else {
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ node.setIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+}
+Collection.maxFlowStringSingleLineLength = 60;
+
+exports.Collection = Collection;
+exports.collectionFromPath = collectionFromPath;
+exports.isEmptyPath = isEmptyPath;
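
The `*In()` helpers above take an iterable path and create missing collections along the way via collectionFromPath(); a sketch (illustrative document):

    const { parseDocument } = require('yaml');

    const doc = parseDocument('server:\n  ports:\n    - 8080\n');
    console.log(doc.getIn(['server', 'ports', 0]));     // 8080
    doc.setIn(['server', 'tls', 'enabled'], true);      // intermediate map is created
    doc.addIn(['server', 'ports'], 9090);               // append to the sequence
    console.log(doc.deleteIn(['server', 'ports', 0]));  // true
    console.log(String(doc));
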
diff --git a/node_modules/yaml/dist/nodes/Node.d.ts b/node_modules/yaml/dist/nodes/Node.d.ts
new file mode 100644
index 0000000..719c65b
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Node.d.ts
@@ -0,0 +1,53 @@
+import type { Document } from '../doc/Document.js';
+import { Token } from '../parse/cst.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import type { Alias } from './Alias.js';
+import type { Pair } from './Pair.js';
+import type { Scalar } from './Scalar.js';
+import type { YAMLMap } from './YAMLMap.js';
+import type { YAMLSeq } from './YAMLSeq.js';
+export declare type Node = Alias | Scalar | YAMLMap | YAMLSeq;
+export declare type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed;
+export declare type Range = [number, number, number];
+export declare const ALIAS: unique symbol;
+export declare const DOC: unique symbol;
+export declare const MAP: unique symbol;
+export declare const PAIR: unique symbol;
+export declare const SCALAR: unique symbol;
+export declare const SEQ: unique symbol;
+export declare const NODE_TYPE: unique symbol;
+export declare const isAlias: (node: any) => node is Alias;
+export declare const isDocument: (node: any) => node is Document<unknown>;
+export declare const isMap: (node: any) => node is YAMLMap<unknown, unknown>;
+export declare const isPair: (node: any) => node is Pair<unknown, unknown>;
+export declare const isScalar: (node: any) => node is Scalar<unknown>;
+export declare const isSeq: (node: any) => node is YAMLSeq<unknown>;
+export declare function isCollection(node: any): node is YAMLMap | YAMLSeq;
+export declare function isNode(node: any): node is Node;
+export declare const hasAnchor: (node: unknown) => node is Scalar<unknown> | YAMLMap<unknown, unknown> | YAMLSeq<unknown>;
+export declare abstract class NodeBase {
+ readonly [NODE_TYPE]: symbol;
+ /** A comment on or immediately after this */
+ comment?: string | null;
+ /** A comment before this */
+ commentBefore?: string | null;
+ /**
+ * The `[start, value-end, node-end]` character offsets for the part of the
+ * source parsed into this node (undefined if not parsed). The `value-end`
+ * and `node-end` positions are themselves not included in their respective
+ * ranges.
+ */
+ range?: Range | null;
+ /** A blank line before this node and its commentBefore */
+ spaceBefore?: boolean;
+ /** The CST token that was composed into this node. */
+ srcToken?: Token;
+ /** A fully qualified tag, if required */
+ tag?: string;
+ /** A plain JS representation of this node */
+ abstract toJSON(): any;
+ abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+ constructor(type: symbol);
+ /** Create a copy of this node. */
+ clone(): NodeBase;
+}
diff --git a/node_modules/yaml/dist/nodes/Node.js b/node_modules/yaml/dist/nodes/Node.js
new file mode 100644
index 0000000..4ddff6a
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Node.js
@@ -0,0 +1,66 @@
+'use strict';
+
+const ALIAS = Symbol.for('yaml.alias');
+const DOC = Symbol.for('yaml.document');
+const MAP = Symbol.for('yaml.map');
+const PAIR = Symbol.for('yaml.pair');
+const SCALAR = Symbol.for('yaml.scalar');
+const SEQ = Symbol.for('yaml.seq');
+const NODE_TYPE = Symbol.for('yaml.node.type');
+const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
+const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
+const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
+const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
+const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;
+const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
+function isCollection(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case MAP:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+function isNode(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case ALIAS:
+ case MAP:
+ case SCALAR:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;
+class NodeBase {
+ constructor(type) {
+ Object.defineProperty(this, NODE_TYPE, { value: type });
+ }
+ /** Create a copy of this node. */
+ clone() {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+}
+
+exports.ALIAS = ALIAS;
+exports.DOC = DOC;
+exports.MAP = MAP;
+exports.NODE_TYPE = NODE_TYPE;
+exports.NodeBase = NodeBase;
+exports.PAIR = PAIR;
+exports.SCALAR = SCALAR;
+exports.SEQ = SEQ;
+exports.hasAnchor = hasAnchor;
+exports.isAlias = isAlias;
+exports.isCollection = isCollection;
+exports.isDocument = isDocument;
+exports.isMap = isMap;
+exports.isNode = isNode;
+exports.isPair = isPair;
+exports.isScalar = isScalar;
+exports.isSeq = isSeq;
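
For reference, these identity helpers are re-exported from the package root, so callers can branch on node kinds without touching the internal `yaml.*` symbols. A minimal sketch, assuming the public entry point exposes `parseDocument`, `isMap`, `isSeq` and `isScalar` as in yaml v2:

'use strict';

const { parseDocument, isMap, isSeq, isScalar } = require('yaml');

// Count the scalar leaves of a parsed document by walking its node tree.
function countScalars(node) {
  if (isScalar(node)) return 1;
  if (isMap(node))
    return node.items.reduce(
      (sum, pair) => sum + countScalars(pair.key) + countScalars(pair.value),
      0
    );
  if (isSeq(node))
    return node.items.reduce((sum, item) => sum + countScalars(item), 0);
  return 0; // null values, aliases etc. are not counted here
}

const doc = parseDocument('a: 1\nb: [2, 3]\n');
console.log(countScalars(doc.contents)); // 5 (keys a and b, plus values 1, 2 and 3)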
diff --git a/node_modules/yaml/dist/nodes/Pair.d.ts b/node_modules/yaml/dist/nodes/Pair.d.ts
new file mode 100644
index 0000000..da3d968
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Pair.d.ts
@@ -0,0 +1,21 @@
+import { CreateNodeContext } from '../doc/createNode.js';
+import type { CollectionItem } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { addPairToJSMap } from './addPairToJSMap.js';
+import { NODE_TYPE } from './Node.js';
+import type { ToJSContext } from './toJS.js';
+export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair<import("./Node.js").Node, import("./Alias.js").Alias | import("./Scalar.js").Scalar<unknown> | import("./YAMLMap.js").YAMLMap<unknown, unknown> | import("./YAMLSeq.js").YAMLSeq<unknown>>;
+export declare class Pair<K = unknown, V = unknown> {
+ readonly [NODE_TYPE]: symbol;
+ /** Always Node or null when parsed, but can be set to anything. */
+ key: K;
+ /** Always Node or null when parsed, but can be set to anything. */
+ value: V | null;
+ /** The CST token that was composed into this pair. */
+ srcToken?: CollectionItem;
+ constructor(key: K, value?: V | null);
+ clone(schema?: Schema): Pair<K, V>;
+ toJSON(_?: unknown, ctx?: ToJSContext): ReturnType<typeof addPairToJSMap>;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Pair.js b/node_modules/yaml/dist/nodes/Pair.js
new file mode 100644
index 0000000..c0c22e9
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Pair.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var createNode = require('../doc/createNode.js');
+var stringifyPair = require('../stringify/stringifyPair.js');
+var addPairToJSMap = require('./addPairToJSMap.js');
+var Node = require('./Node.js');
+
+function createPair(key, value, ctx) {
+ const k = createNode.createNode(key, undefined, ctx);
+ const v = createNode.createNode(value, undefined, ctx);
+ return new Pair(k, v);
+}
+class Pair {
+ constructor(key, value = null) {
+ Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });
+ this.key = key;
+ this.value = value;
+ }
+ clone(schema) {
+ let { key, value } = this;
+ if (Node.isNode(key))
+ key = key.clone(schema);
+ if (Node.isNode(value))
+ value = value.clone(schema);
+ return new Pair(key, value);
+ }
+ toJSON(_, ctx) {
+ const pair = (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ return addPairToJSMap.addPairToJSMap(ctx, pair, this);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.doc)
+ ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)
+ : JSON.stringify(this);
+ }
+}
+
+exports.Pair = Pair;
+exports.createPair = createPair;
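
A `Pair` only links a key to a value and is not itself a `NodeBase`; plain JS keys and values are accepted and wrapped into nodes when the document is stringified. A small sketch of building a mapping from explicit pairs, assuming `Document` and `Pair` are exported from the package root as in yaml v2:

'use strict';

const { Document, Pair } = require('yaml');

const doc = new Document({});               // contents is an empty YAMLMap
doc.contents.items.push(new Pair('name', 'snowjail'));
doc.contents.items.push(new Pair('license', 'MIT'));

console.log(String(doc));
// name: snowjail
// license: MIT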
diff --git a/node_modules/yaml/dist/nodes/Scalar.d.ts b/node_modules/yaml/dist/nodes/Scalar.d.ts
new file mode 100644
index 0000000..dd330b2
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Scalar.d.ts
@@ -0,0 +1,42 @@
+import type { BlockScalar, FlowScalar } from '../parse/cst.js';
+import { NodeBase, Range } from './Node.js';
+import { ToJSContext } from './toJS.js';
+export declare const isScalarValue: (value: unknown) => boolean;
+export declare namespace Scalar {
+ interface Parsed extends Scalar {
+ range: Range;
+ source: string;
+ srcToken?: FlowScalar | BlockScalar;
+ }
+ type BLOCK_FOLDED = 'BLOCK_FOLDED';
+ type BLOCK_LITERAL = 'BLOCK_LITERAL';
+ type PLAIN = 'PLAIN';
+ type QUOTE_DOUBLE = 'QUOTE_DOUBLE';
+ type QUOTE_SINGLE = 'QUOTE_SINGLE';
+ type Type = BLOCK_FOLDED | BLOCK_LITERAL | PLAIN | QUOTE_DOUBLE | QUOTE_SINGLE;
+}
+export declare class Scalar<T = unknown> extends NodeBase {
+ static readonly BLOCK_FOLDED = "BLOCK_FOLDED";
+ static readonly BLOCK_LITERAL = "BLOCK_LITERAL";
+ static readonly PLAIN = "PLAIN";
+ static readonly QUOTE_DOUBLE = "QUOTE_DOUBLE";
+ static readonly QUOTE_SINGLE = "QUOTE_SINGLE";
+ value: T;
+ /** An optional anchor on this node. Used by alias nodes. */
+ anchor?: string;
+ /**
+ * By default (undefined), numbers use decimal notation.
+ * The YAML 1.2 core schema only supports 'HEX' and 'OCT'.
+ * The YAML 1.1 schema also supports 'BIN' and 'TIME'
+ */
+ format?: string;
+ /** If `value` is a number, use this value when stringifying this node. */
+ minFractionDigits?: number;
+ /** Set during parsing to the source string value */
+ source?: string;
+ /** The scalar style used for the node's string representation */
+ type?: Scalar.Type;
+ constructor(value: T);
+ toJSON(arg?: any, ctx?: ToJSContext): any;
+ toString(): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Scalar.js b/node_modules/yaml/dist/nodes/Scalar.js
new file mode 100644
index 0000000..2d46471
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Scalar.js
@@ -0,0 +1,26 @@
+'use strict';
+
+var Node = require('./Node.js');
+var toJS = require('./toJS.js');
+
+const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
+class Scalar extends Node.NodeBase {
+ constructor(value) {
+ super(Node.SCALAR);
+ this.value = value;
+ }
+ toJSON(arg, ctx) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.keep) ? this.value : toJS.toJS(this.value, arg, ctx);
+ }
+ toString() {
+ return String(this.value);
+ }
+}
+Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
+Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
+Scalar.PLAIN = 'PLAIN';
+Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
+Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
+
+exports.Scalar = Scalar;
+exports.isScalarValue = isScalarValue;
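
The `type`, `format` and `anchor` fields are presentation hints that the stringifier consults when writing the scalar back out. A hedged sketch, assuming `Document` and `Scalar` are available from the package root:

'use strict';

const { Document, Scalar } = require('yaml');

const doc = new Document({ msg: 'hello', note: 'line one\nline two\n' });

// Document#get with keepScalar=true returns the Scalar node itself.
const msg = doc.get('msg', true);
msg.type = Scalar.QUOTE_SINGLE;    // request 'hello' instead of plain hello

const note = doc.get('note', true);
note.type = Scalar.BLOCK_LITERAL;  // request a | block scalar

console.log(String(doc));
// msg: 'hello'
// note: |
//   line one
//   line two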
diff --git a/node_modules/yaml/dist/nodes/YAMLMap.d.ts b/node_modules/yaml/dist/nodes/YAMLMap.d.ts
new file mode 100644
index 0000000..f13d7e5
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLMap.d.ts
@@ -0,0 +1,43 @@
+import type { BlockMap, FlowCollection } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { Collection } from './Collection.js';
+import { ParsedNode, Range } from './Node.js';
+import { Pair } from './Pair.js';
+import type { ToJSContext } from './toJS.js';
+export declare function findPair<K = unknown, V = unknown>(items: Iterable<Pair<K, V>>, key: unknown): Pair<K, V> | undefined;
+export declare namespace YAMLMap {
+ interface Parsed<K extends ParsedNode = ParsedNode, V extends ParsedNode | null = ParsedNode | null> extends YAMLMap<K, V> {
+ items: Pair<K, V>[];
+ range: Range;
+ srcToken?: BlockMap | FlowCollection;
+ }
+}
+export declare class YAMLMap<K = unknown, V = unknown> extends Collection {
+ static get tagName(): 'tag:yaml.org,2002:map';
+ items: Pair<K, V>[];
+ constructor(schema?: Schema);
+ /**
+ * Adds a value to the collection.
+ *
+ * @param overwrite - If not set `true`, using a key that is already in the
+ * collection will throw. Otherwise, overwrites the previous value.
+ */
+ add(pair: Pair<K, V> | {
+ key: K;
+ value: V;
+ }, overwrite?: boolean): void;
+ delete(key: K): boolean;
+ get(key: K, keepScalar?: boolean): unknown;
+ has(key: K): boolean;
+ set(key: K, value: V): void;
+ /**
+ * @param ctx - Conversion context, originally set in Document#toJS()
+ * @param {Class} Type - If set, forces the returned collection type
+ * @returns Instance of Type, Map, or Object
+ */
+ toJSON<T = unknown>(_?: unknown, ctx?: ToJSContext, Type?: {
+ new (): T;
+ }): any;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/YAMLMap.js b/node_modules/yaml/dist/nodes/YAMLMap.js
new file mode 100644
index 0000000..9b4a990
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLMap.js
@@ -0,0 +1,120 @@
+'use strict';
+
+var stringifyCollection = require('../stringify/stringifyCollection.js');
+var addPairToJSMap = require('./addPairToJSMap.js');
+var Collection = require('./Collection.js');
+var Node = require('./Node.js');
+var Pair = require('./Pair.js');
+var Scalar = require('./Scalar.js');
+
+function findPair(items, key) {
+ const k = Node.isScalar(key) ? key.value : key;
+ for (const it of items) {
+ if (Node.isPair(it)) {
+ if (it.key === key || it.key === k)
+ return it;
+ if (Node.isScalar(it.key) && it.key.value === k)
+ return it;
+ }
+ }
+ return undefined;
+}
+class YAMLMap extends Collection.Collection {
+ constructor(schema) {
+ super(Node.MAP, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:map';
+ }
+ /**
+ * Adds a value to the collection.
+ *
+ * @param overwrite - If not set `true`, using a key that is already in the
+ * collection will throw. Otherwise, overwrites the previous value.
+ */
+ add(pair, overwrite) {
+ var _a;
+ let _pair;
+ if (Node.isPair(pair))
+ _pair = pair;
+ else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
+ // In TypeScript, this never happens.
+ _pair = new Pair.Pair(pair, pair.value);
+ }
+ else
+ _pair = new Pair.Pair(pair.key, pair.value);
+ const prev = findPair(this.items, _pair.key);
+ const sortEntries = (_a = this.schema) === null || _a === void 0 ? void 0 : _a.sortMapEntries;
+ if (prev) {
+ if (!overwrite)
+ throw new Error(`Key ${_pair.key} already set`);
+ // For scalars, keep the old node & its comments and anchors
+ if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))
+ prev.value.value = _pair.value;
+ else
+ prev.value = _pair.value;
+ }
+ else if (sortEntries) {
+ const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
+ if (i === -1)
+ this.items.push(_pair);
+ else
+ this.items.splice(i, 0, _pair);
+ }
+ else {
+ this.items.push(_pair);
+ }
+ }
+ delete(key) {
+ const it = findPair(this.items, key);
+ if (!it)
+ return false;
+ const del = this.items.splice(this.items.indexOf(it), 1);
+ return del.length > 0;
+ }
+ get(key, keepScalar) {
+ const it = findPair(this.items, key);
+ const node = it === null || it === void 0 ? void 0 : it.value;
+ return !keepScalar && Node.isScalar(node) ? node.value : node;
+ }
+ has(key) {
+ return !!findPair(this.items, key);
+ }
+ set(key, value) {
+ this.add(new Pair.Pair(key, value), true);
+ }
+ /**
+ * @param ctx - Conversion context, originally set in Document#toJS()
+ * @param {Class} Type - If set, forces the returned collection type
+ * @returns Instance of Type, Map, or Object
+ */
+ toJSON(_, ctx, Type) {
+ const map = Type ? new Type() : (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const item of this.items)
+ addPairToJSMap.addPairToJSMap(ctx, map, item);
+ return map;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ for (const item of this.items) {
+ if (!Node.isPair(item))
+ throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
+ }
+ if (!ctx.allNullValues && this.hasAllNullValues(false))
+ ctx = Object.assign({}, ctx, { allNullValues: true });
+ return stringifyCollection.stringifyCollection(this, ctx, {
+ blockItemPrefix: '',
+ flowChars: { start: '{', end: '}' },
+ itemIndent: ctx.indent || '',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+
+exports.YAMLMap = YAMLMap;
+exports.findPair = findPair;
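
In practice these map methods are reached through a parsed document's contents; `get`, `set`, `has` and `delete` all go through `findPair`, and `add` only overwrites when asked to. A short sketch, assuming `parseDocument` from the package root:

'use strict';

const { parseDocument } = require('yaml');

const doc = parseDocument('a: 1\nb: 2\n');
const map = doc.contents;           // YAMLMap.Parsed

map.set('c', 3);                    // add() with overwrite=true under the hood
console.log(map.get('a'));          // 1 (the Scalar value, unwrapped)
console.log(map.get('a', true));    // the Scalar node itself
console.log(map.has('b'));          // true
map.delete('b');

try {
  map.add({ key: 'a', value: 99 }); // no overwrite flag: an existing key throws
} catch (err) {
  console.log(err.message);         // Key a already set
}

console.log(String(doc));
// a: 1
// c: 3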
diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.d.ts b/node_modules/yaml/dist/nodes/YAMLSeq.d.ts
new file mode 100644
index 0000000..f892a97
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLSeq.d.ts
@@ -0,0 +1,55 @@
+import type { BlockSequence, FlowCollection } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { Collection } from './Collection.js';
+import { ParsedNode, Range } from './Node.js';
+import type { Pair } from './Pair.js';
+import { ToJSContext } from './toJS.js';
+export declare namespace YAMLSeq {
+ interface Parsed<T extends ParsedNode | Pair<ParsedNode, ParsedNode | null> = ParsedNode> extends YAMLSeq<T> {
+ items: T[];
+ range: Range;
+ srcToken?: BlockSequence | FlowCollection;
+ }
+}
+export declare class YAMLSeq<T = unknown> extends Collection {
+ static get tagName(): 'tag:yaml.org,2002:seq';
+ items: T[];
+ constructor(schema?: Schema);
+ add(value: T): void;
+ /**
+ * Removes a value from the collection.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ *
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key: unknown): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ has(key: unknown): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ *
+ * If `key` does not contain a representation of an integer, this will throw.
+ * It may be wrapped in a `Scalar`.
+ */
+ set(key: unknown, value: T): void;
+ toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.js b/node_modules/yaml/dist/nodes/YAMLSeq.js
new file mode 100644
index 0000000..fe5451a
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLSeq.js
@@ -0,0 +1,107 @@
+'use strict';
+
+var stringifyCollection = require('../stringify/stringifyCollection.js');
+var Collection = require('./Collection.js');
+var Node = require('./Node.js');
+var Scalar = require('./Scalar.js');
+var toJS = require('./toJS.js');
+
+class YAMLSeq extends Collection.Collection {
+ constructor(schema) {
+ super(Node.SEQ, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:seq';
+ }
+ add(value) {
+ this.items.push(value);
+ }
+ /**
+ * Removes a value from the collection.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ *
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return false;
+ const del = this.items.splice(idx, 1);
+ return del.length > 0;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ get(key, keepScalar) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return undefined;
+ const it = this.items[idx];
+ return !keepScalar && Node.isScalar(it) ? it.value : it;
+ }
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ has(key) {
+ const idx = asItemIndex(key);
+ return typeof idx === 'number' && idx < this.items.length;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ *
+ * If `key` does not contain a representation of an integer, this will throw.
+ * It may be wrapped in a `Scalar`.
+ */
+ set(key, value) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ throw new Error(`Expected a valid index, not ${key}.`);
+ const prev = this.items[idx];
+ if (Node.isScalar(prev) && Scalar.isScalarValue(value))
+ prev.value = value;
+ else
+ this.items[idx] = value;
+ }
+ toJSON(_, ctx) {
+ const seq = [];
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(seq);
+ let i = 0;
+ for (const item of this.items)
+ seq.push(toJS.toJS(item, String(i++), ctx));
+ return seq;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ return stringifyCollection.stringifyCollection(this, ctx, {
+ blockItemPrefix: '- ',
+ flowChars: { start: '[', end: ']' },
+ itemIndent: (ctx.indent || '') + ' ',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+function asItemIndex(key) {
+ let idx = Node.isScalar(key) ? key.value : key;
+ if (idx && typeof idx === 'string')
+ idx = Number(idx);
+ return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
+ ? idx
+ : null;
+}
+
+exports.YAMLSeq = YAMLSeq;
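
Sequence keys go through `asItemIndex`, so plain numbers, numeric strings and `Scalar`-wrapped indices are all accepted. A brief sketch, assuming `parseDocument` and `Scalar` from the package root:

'use strict';

const { parseDocument, Scalar } = require('yaml');

const doc = parseDocument('- zero\n- one\n- two\n');
const seq = doc.contents;             // YAMLSeq.Parsed

console.log(seq.get(1));              // 'one'
console.log(seq.get('2'));            // 'two'  (numeric strings are coerced)
console.log(seq.get(new Scalar(0)));  // 'zero' (indices may be wrapped in a Scalar)

seq.set(1, 'ONE');                    // the existing Scalar node keeps its comments/anchor
seq.add('three');
seq.delete(0);

console.log(doc.toJS());              // [ 'ONE', 'two', 'three' ]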
diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts b/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
new file mode 100644
index 0000000..5b0bb11
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
@@ -0,0 +1,3 @@
+import type { Pair } from './Pair.js';
+import { ToJSContext } from './toJS.js';
+export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>, { key, value }: Pair): Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>;
diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.js b/node_modules/yaml/dist/nodes/addPairToJSMap.js
new file mode 100644
index 0000000..5c0f755
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/addPairToJSMap.js
@@ -0,0 +1,106 @@
+'use strict';
+
+var log = require('../log.js');
+var stringify = require('../stringify/stringify.js');
+var Node = require('./Node.js');
+var Scalar = require('./Scalar.js');
+var toJS = require('./toJS.js');
+
+const MERGE_KEY = '<<';
+function addPairToJSMap(ctx, map, { key, value }) {
+ if ((ctx === null || ctx === void 0 ? void 0 : ctx.doc.schema.merge) && isMergeKey(key)) {
+ value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (Node.isSeq(value))
+ for (const it of value.items)
+ mergeToJSMap(ctx, map, it);
+ else if (Array.isArray(value))
+ for (const it of value)
+ mergeToJSMap(ctx, map, it);
+ else
+ mergeToJSMap(ctx, map, value);
+ }
+ else {
+ const jsKey = toJS.toJS(key, '', ctx);
+ if (map instanceof Map) {
+ map.set(jsKey, toJS.toJS(value, jsKey, ctx));
+ }
+ else if (map instanceof Set) {
+ map.add(jsKey);
+ }
+ else {
+ const stringKey = stringifyKey(key, jsKey, ctx);
+ const jsValue = toJS.toJS(value, stringKey, ctx);
+ if (stringKey in map)
+ Object.defineProperty(map, stringKey, {
+ value: jsValue,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ else
+ map[stringKey] = jsValue;
+ }
+ }
+ return map;
+}
+const isMergeKey = (key) => key === MERGE_KEY ||
+ (Node.isScalar(key) &&
+ key.value === MERGE_KEY &&
+ (!key.type || key.type === Scalar.Scalar.PLAIN));
+// If the value associated with a merge key is a single mapping node, each of
+// its key/value pairs is inserted into the current mapping, unless the key
+// already exists in it. If the value associated with the merge key is a
+// sequence, then this sequence is expected to contain mapping nodes and each
+// of these nodes is merged in turn according to its order in the sequence.
+// Keys in mapping nodes earlier in the sequence override keys specified in
+// later mapping nodes. -- http://yaml.org/type/merge.html
+function mergeToJSMap(ctx, map, value) {
+ const source = ctx && Node.isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (!Node.isMap(source))
+ throw new Error('Merge sources must be maps or map aliases');
+ const srcMap = source.toJSON(null, ctx, Map);
+ for (const [key, value] of srcMap) {
+ if (map instanceof Map) {
+ if (!map.has(key))
+ map.set(key, value);
+ }
+ else if (map instanceof Set) {
+ map.add(key);
+ }
+ else if (!Object.prototype.hasOwnProperty.call(map, key)) {
+ Object.defineProperty(map, key, {
+ value,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ }
+ }
+ return map;
+}
+function stringifyKey(key, jsKey, ctx) {
+ if (jsKey === null)
+ return '';
+ if (typeof jsKey !== 'object')
+ return String(jsKey);
+ if (Node.isNode(key) && ctx && ctx.doc) {
+ const strCtx = stringify.createStringifyContext(ctx.doc, {});
+ strCtx.anchors = new Set();
+ for (const node of ctx.anchors.keys())
+ strCtx.anchors.add(node.anchor);
+ strCtx.inFlow = true;
+ strCtx.inStringifyKey = true;
+ const strKey = key.toString(strCtx);
+ if (!ctx.mapKeyWarned) {
+ let jsonStr = JSON.stringify(strKey);
+ if (jsonStr.length > 40)
+ jsonStr = jsonStr.substring(0, 36) + '..."';
+ log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
+ ctx.mapKeyWarned = true;
+ }
+ return strKey;
+ }
+ return JSON.stringify(jsKey);
+}
+
+exports.addPairToJSMap = addPairToJSMap;
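
This merge-key handling is what makes `<<` aliases expand during conversion to JS. A minimal sketch, assuming `parse` from the package root and that `merge: true` enables the behaviour for YAML 1.2 documents:

'use strict';

const { parse } = require('yaml');

const src = [
  'defaults: &defaults',
  '  adapter: postgres',
  '  host: localhost',
  'development:',
  '  <<: *defaults',
  '  database: dev_db',
  ''
].join('\n');

// merge must be enabled explicitly for YAML 1.2 (it defaults to true for 1.1).
console.log(parse(src, { merge: true }).development);
// { adapter: 'postgres', host: 'localhost', database: 'dev_db' }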
diff --git a/node_modules/yaml/dist/nodes/toJS.d.ts b/node_modules/yaml/dist/nodes/toJS.d.ts
new file mode 100644
index 0000000..85d794d
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/toJS.d.ts
@@ -0,0 +1,30 @@
+import type { Document } from '../doc/Document.js';
+import type { stringify } from '../stringify/stringify.js';
+import { Node } from './Node.js';
+export interface AnchorData {
+ aliasCount: number;
+ count: number;
+ res: unknown;
+}
+export interface ToJSContext {
+ anchors: Map<Node, AnchorData>;
+ doc: Document;
+ keep: boolean;
+ mapAsMap: boolean;
+ mapKeyWarned: boolean;
+ maxAliasCount: number;
+ onCreate?: (res: unknown) => void;
+ /** Requiring this directly in Pair would create circular dependencies */
+ stringify: typeof stringify;
+}
+/**
+ * Recursively convert any node or its contents to native JavaScript
+ *
+ * @param value - The input value
+ * @param arg - If `value` defines a `toJSON()` method, use this
+ * as its first argument
+ * @param ctx - Conversion context, originally set in Document#toJS(). If
+ * `{ keep: true }` is not set, output should be suitable for JSON
+ * stringification.
+ */
+export declare function toJS(value: any, arg: string | null, ctx?: ToJSContext): any;
diff --git a/node_modules/yaml/dist/nodes/toJS.js b/node_modules/yaml/dist/nodes/toJS.js
new file mode 100644
index 0000000..b155e6c
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/toJS.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var Node = require('./Node.js');
+
+/**
+ * Recursively convert any node or its contents to native JavaScript
+ *
+ * @param value - The input value
+ * @param arg - If `value` defines a `toJSON()` method, use this
+ * as its first argument
+ * @param ctx - Conversion context, originally set in Document#toJS(). If
+ * `{ keep: true }` is not set, output should be suitable for JSON
+ * stringification.
+ */
+function toJS(value, arg, ctx) {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+ if (Array.isArray(value))
+ return value.map((v, i) => toJS(v, String(i), ctx));
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ if (!ctx || !Node.hasAnchor(value))
+ return value.toJSON(arg, ctx);
+ const data = { aliasCount: 0, count: 1, res: undefined };
+ ctx.anchors.set(value, data);
+ ctx.onCreate = res => {
+ data.res = res;
+ delete ctx.onCreate;
+ };
+ const res = value.toJSON(arg, ctx);
+ if (ctx.onCreate)
+ ctx.onCreate(res);
+ return res;
+ }
+ if (typeof value === 'bigint' && !(ctx === null || ctx === void 0 ? void 0 : ctx.keep))
+ return Number(value);
+ return value;
+}
+
+exports.toJS = toJS;
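
The `ToJSContext` above is normally assembled by `Document#toJS()`, which is also where `mapAsMap`, `maxAliasCount` and `onAnchor` enter the picture. A hedged sketch:

'use strict';

const { parseDocument } = require('yaml');

const doc = parseDocument('base: &b { x: 1 }\ncopy: *b\n');

// mapAsMap ends up in the ToJSContext, switching output from Object to Map.
const asMaps = doc.toJS({ mapAsMap: true });
console.log(asMaps instanceof Map);   // true
console.log(asMaps.get('copy'));      // a Map with 'x' => 1 (shared with asMaps.get('base'))

// onAnchor reports each anchored value together with its reference count.
doc.toJS({ onAnchor: (value, count) => console.log(count, value) });
// logs the resolved { x: 1 } once, with its reference count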
diff --git a/node_modules/yaml/dist/options.d.ts b/node_modules/yaml/dist/options.d.ts
new file mode 100644
index 0000000..9444c2e
--- /dev/null
+++ b/node_modules/yaml/dist/options.d.ts
@@ -0,0 +1,330 @@
+import type { Reviver } from './doc/applyReviver.js';
+import type { Directives } from './doc/directives.js';
+import type { LogLevelId } from './log.js';
+import type { ParsedNode } from './nodes/Node.js';
+import type { Pair } from './nodes/Pair.js';
+import type { Scalar } from './nodes/Scalar.js';
+import type { LineCounter } from './parse/line-counter.js';
+import type { Schema } from './schema/Schema.js';
+import type { Tags } from './schema/tags.js';
+import type { CollectionTag, ScalarTag } from './schema/types.js';
+export declare type ParseOptions = {
+ /**
+ * Whether integers should be parsed into BigInt rather than number values.
+ *
+ * Default: `false`
+ *
+ * https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/BigInt
+ */
+ intAsBigInt?: boolean;
+ /**
+ * Include a `srcToken` value on each parsed `Node`, containing the CST token
+ * that was composed into this node.
+ *
+ * Default: `false`
+ */
+ keepSourceTokens?: boolean;
+ /**
+ * If set, newlines will be tracked, to allow for `lineCounter.linePos(offset)`
+ * to provide the `{ line, col }` positions within the input.
+ */
+ lineCounter?: LineCounter;
+ /**
+ * Include line/col position & node type directly in parse errors.
+ *
+ * Default: `true`
+ */
+ prettyErrors?: boolean;
+ /**
+ * Detect and report errors that are required by the YAML 1.2 spec,
+ * but are caused by unambiguous content.
+ *
+ * Default: `true`
+ */
+ strict?: boolean;
+ /**
+ * YAML requires map keys to be unique. By default, this is checked by
+ * comparing scalar values with `===`; deep equality is not checked for
+ * aliases or collections. If merge keys are enabled by the schema,
+ * multiple `<<` keys are allowed.
+ *
+ * Set `false` to disable, or provide your own comparator function to
+ * customise. The comparator will be passed two `ParsedNode` values, and
+ * is expected to return a `boolean` indicating their equality.
+ *
+ * Default: `true`
+ */
+ uniqueKeys?: boolean | ((a: ParsedNode, b: ParsedNode) => boolean);
+};
+export declare type DocumentOptions = {
+ /**
+ * Used internally by Composer. If set and includes an explicit version,
+ * that overrides the `version` option.
+ */
+ directives?: Directives;
+ /**
+ * Control the logging level during parsing
+ *
+ * Default: `'warn'`
+ */
+ logLevel?: LogLevelId;
+ /**
+ * The YAML version used by documents without a `%YAML` directive.
+ *
+ * Default: `"1.2"`
+ */
+ version?: '1.1' | '1.2' | 'next';
+};
+export declare type SchemaOptions = {
+ /**
+ * When parsing, warn about compatibility issues with the given schema.
+ * When stringifying, use scalar styles that are parsed correctly
+ * by the `compat` schema as well as the actual schema.
+ *
+ * Default: `null`
+ */
+ compat?: string | Tags | null;
+ /**
+ * Array of additional tags to include in the schema, or a function that may
+ * modify the schema's base tag array.
+ */
+ customTags?: Tags | ((tags: Tags) => Tags) | null;
+ /**
+ * Enable support for `<<` merge keys.
+ *
+ * Default: `false` for YAML 1.2, `true` for earlier versions
+ */
+ merge?: boolean;
+ /**
+ * When using the `'core'` schema, support parsing values with these
+ * explicit YAML 1.1 tags:
+ *
+ * `!!binary`, `!!omap`, `!!pairs`, `!!set`, `!!timestamp`.
+ *
+   * Default: `true`
+ */
+ resolveKnownTags?: boolean;
+ /**
+ * The base schema to use.
+ *
+ * The core library has built-in support for the following:
+ * - `'failsafe'`: A minimal schema that parses all scalars as strings
+ * - `'core'`: The YAML 1.2 core schema
+ * - `'json'`: The YAML 1.2 JSON schema, with minimal rules for JSON compatibility
+ * - `'yaml-1.1'`: The YAML 1.1 schema
+ *
+ * If using another (custom) schema, the `customTags` array needs to
+ * fully define the schema's tags.
+ *
+ * Default: `'core'` for YAML 1.2, `'yaml-1.1'` for earlier versions
+ */
+ schema?: string | Schema;
+ /**
+ * When adding to or stringifying a map, sort the entries.
+ * If `true`, sort by comparing key values with `<`.
+ * Does not affect item order when parsing.
+ *
+ * Default: `false`
+ */
+ sortMapEntries?: boolean | ((a: Pair, b: Pair) => number);
+ /**
+ * Override default values for `toString()` options.
+ */
+ toStringDefaults?: ToStringOptions;
+};
+export declare type CreateNodeOptions = {
+ /**
+ * During node construction, use anchors and aliases to keep strictly equal
+ * non-null objects as equivalent in YAML.
+ *
+ * Default: `true`
+ */
+ aliasDuplicateObjects?: boolean;
+ /**
+ * Default prefix for anchors.
+ *
+ * Default: `'a'`, resulting in anchors `a1`, `a2`, etc.
+ */
+ anchorPrefix?: string;
+ /** Force the top-level collection node to use flow style. */
+ flow?: boolean;
+ /**
+ * Keep `undefined` object values when creating mappings, rather than
+ * discarding them.
+ *
+ * Default: `false`
+ */
+ keepUndefined?: boolean | null;
+ onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
+ /**
+ * Specify the top-level collection type, e.g. `"!!omap"`. Note that this
+ * requires the corresponding tag to be available in this document's schema.
+ */
+ tag?: string;
+};
+export declare type ToJSOptions = {
+ /**
+ * Use Map rather than Object to represent mappings.
+ *
+ * Default: `false`
+ */
+ mapAsMap?: boolean;
+ /**
+ * Prevent exponential entity expansion attacks by limiting data aliasing count;
+ * set to `-1` to disable checks; `0` disallows all alias nodes.
+ *
+ * Default: `100`
+ */
+ maxAliasCount?: number;
+ /**
+ * If defined, called with the resolved `value` and reference `count` for
+ * each anchor in the document.
+ */
+ onAnchor?: (value: unknown, count: number) => void;
+ /**
+ * Optional function that may filter or modify the output JS value
+ *
+ * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
+ */
+ reviver?: Reviver;
+};
+export declare type ToStringOptions = {
+ /**
+ * Use block quote styles for scalar values where applicable.
+ * Set to `false` to disable block quotes completely.
+ *
+ * Default: `true`
+ */
+ blockQuote?: boolean | 'folded' | 'literal';
+ /**
+ * Enforce `'block'` or `'flow'` style on maps and sequences.
+ * Empty collections will always be stringified as `{}` or `[]`.
+ *
+ * Default: `'any'`, allowing each node to set its style separately
+ * with its `flow: boolean` (default `false`) property.
+ */
+ collectionStyle?: 'any' | 'block' | 'flow';
+ /**
+ * Comment stringifier.
+ * Output should be valid for the current schema.
+ *
+ * By default, empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+ commentString?: (comment: string) => string;
+ /**
+ * The default type of string literal used to stringify implicit key values.
+ * Output may use other types if required to fully represent the value.
+ *
+ * If `null`, the value of `defaultStringType` is used.
+ *
+ * Default: `null`
+ */
+ defaultKeyType?: Scalar.Type | null;
+ /**
+ * The default type of string literal used to stringify values in general.
+ * Output may use other types if required to fully represent the value.
+ *
+ * Default: `'PLAIN'`
+ */
+ defaultStringType?: Scalar.Type;
+ /**
+ * Include directives in the output.
+ *
+ * - If `true`, at least the document-start marker `---` is always included.
+ * This does not force the `%YAML` directive to be included. To do that,
+ * set `doc.directives.yaml.explicit = true`.
+ * - If `false`, no directives or marker is ever included. If using the `%TAG`
+ * directive, you are expected to include it manually in the stream before
+ * its use.
+ * - If `null`, directives and marker may be included if required.
+ *
+ * Default: `null`
+ */
+ directives?: boolean | null;
+ /**
+ * Restrict double-quoted strings to use JSON-compatible syntax.
+ *
+ * Default: `false`
+ */
+ doubleQuotedAsJSON?: boolean;
+ /**
+ * Minimum length for double-quoted strings to use multiple lines to
+ * represent the value. Ignored if `doubleQuotedAsJSON` is set.
+ *
+ * Default: `40`
+ */
+ doubleQuotedMinMultiLineLength?: number;
+ /**
+ * String representation for `false`.
+ * With the core schema, use `'false'`, `'False'`, or `'FALSE'`.
+ *
+ * Default: `'false'`
+ */
+ falseStr?: string;
+ /**
+ * The number of spaces to use when indenting code.
+ *
+ * Default: `2`
+ */
+ indent?: number;
+ /**
+ * Whether block sequences should be indented.
+ *
+ * Default: `true`
+ */
+ indentSeq?: boolean;
+ /**
+ * Maximum line width (set to `0` to disable folding).
+ *
+ * This is a soft limit, as only double-quoted semantics allow for inserting
+ * a line break in the middle of a word, as well as being influenced by the
+ * `minContentWidth` option.
+ *
+ * Default: `80`
+ */
+ lineWidth?: number;
+ /**
+ * Minimum line width for highly-indented content (set to `0` to disable).
+ *
+ * Default: `20`
+ */
+ minContentWidth?: number;
+ /**
+ * String representation for `null`.
+ * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty
+ * string `''`.
+ *
+ * Default: `'null'`
+ */
+ nullStr?: string;
+ /**
+ * Require keys to be scalars and to use implicit rather than explicit notation.
+ *
+ * Default: `false`
+ */
+ simpleKeys?: boolean;
+ /**
+ * Use 'single quote' rather than "double quote" where applicable.
+ * Set to `false` to disable single quotes completely.
+ *
+ * Default: `null`
+ */
+ singleQuote?: boolean | null;
+ /**
+ * String representation for `true`.
+ * With the core schema, use `'true'`, `'True'`, or `'TRUE'`.
+ *
+ * Default: `'true'`
+ */
+ trueStr?: string;
+ /**
+ * The anchor used by an alias must be defined before the alias node. As it's
+ * possible for the document to be modified manually, the order may be
+ * verified during stringification.
+ *
+   * Default: `true`
+ */
+ verifyAliasOrder?: boolean;
+};
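
All of these option groups are merged into the single options argument accepted by `parse()`, `parseDocument()` and `stringify()`. A short sketch exercising a few of them (the exact quoting of the output depends on the defaults above):

'use strict';

const YAML = require('yaml');

// ParseOptions: keep large integers exact instead of rounding to Number.
const data = YAML.parse('big: 9007199254740993\n', { intAsBigInt: true });
console.log(typeof data.big);   // 'bigint'

// ToStringOptions: shape the output of stringify().
console.log(YAML.stringify(
  { empty: null, items: ['a', 'b'] },
  { nullStr: '~', indent: 4 }
));
// empty: ~
// items:
//     - a
//     - b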
diff --git a/node_modules/yaml/dist/parse/cst-scalar.d.ts b/node_modules/yaml/dist/parse/cst-scalar.d.ts
new file mode 100644
index 0000000..a7bd1d6
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-scalar.d.ts
@@ -0,0 +1,64 @@
+import { ErrorCode } from '../errors.js';
+import { Range } from '../nodes/Node.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst.js';
+/**
+ * If `token` is a CST flow or block scalar, determine its string value and a few other attributes.
+ * Otherwise, return `null`.
+ */
+export declare function resolveAsScalar(token: FlowScalar | BlockScalar, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
+ value: string;
+ type: Scalar.Type | null;
+ comment: string;
+ range: Range;
+};
+export declare function resolveAsScalar(token: Token | null | undefined, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
+ value: string;
+ type: Scalar.Type | null;
+ comment: string;
+ range: Range;
+} | null;
+/**
+ * Create a new scalar token with `value`
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.indent The indent level of the token.
+ * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
+ * @param context.offset The offset position of the token.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+export declare function createScalarToken(value: string, context: {
+ end?: SourceToken[];
+ implicitKey?: boolean;
+ indent: number;
+ inFlow?: boolean;
+ offset?: number;
+ type?: Scalar.Type;
+}): BlockScalar | FlowScalar;
+/**
+ * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
+ *
+ * Best efforts are made to retain any comments previously associated with the `token`,
+ * though all contents within a collection's `items` will be overwritten.
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+export declare function setScalarValue(token: Token, value: string, context?: {
+ afterKey?: boolean;
+ implicitKey?: boolean;
+ inFlow?: boolean;
+ type?: Scalar.Type;
+}): void;
diff --git a/node_modules/yaml/dist/parse/cst-scalar.js b/node_modules/yaml/dist/parse/cst-scalar.js
new file mode 100644
index 0000000..6bee70e
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-scalar.js
@@ -0,0 +1,219 @@
+'use strict';
+
+var resolveBlockScalar = require('../compose/resolve-block-scalar.js');
+var resolveFlowScalar = require('../compose/resolve-flow-scalar.js');
+var errors = require('../errors.js');
+var stringifyString = require('../stringify/stringifyString.js');
+
+function resolveAsScalar(token, strict = true, onError) {
+ if (token) {
+ const _onError = (pos, code, message) => {
+ const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
+ if (onError)
+ onError(offset, code, message);
+ else
+ throw new errors.YAMLParseError([offset, offset + 1], code, message);
+ };
+ switch (token.type) {
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);
+ case 'block-scalar':
+ return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);
+ }
+ }
+ return null;
+}
+/**
+ * Create a new scalar token with `value`
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.indent The indent level of the token.
+ * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
+ * @param context.offset The offset position of the token.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function createScalarToken(value, context) {
+ var _a;
+ const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
+ const source = stringifyString.stringifyString({ type, value }, {
+ implicitKey,
+ indent: indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ const end = (_a = context.end) !== null && _a !== void 0 ? _a : [
+ { type: 'newline', offset: -1, indent, source: '\n' }
+ ];
+ switch (source[0]) {
+ case '|':
+ case '>': {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, end))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ return { type: 'block-scalar', offset, indent, props, source: body };
+ }
+ case '"':
+ return { type: 'double-quoted-scalar', offset, indent, source, end };
+ case "'":
+ return { type: 'single-quoted-scalar', offset, indent, source, end };
+ default:
+ return { type: 'scalar', offset, indent, source, end };
+ }
+}
+/**
+ * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
+ *
+ * Best efforts are made to retain any comments previously associated with the `token`,
+ * though all contents within a collection's `items` will be overwritten.
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function setScalarValue(token, value, context = {}) {
+ let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
+ let indent = 'indent' in token ? token.indent : null;
+ if (afterKey && typeof indent === 'number')
+ indent += 2;
+ if (!type)
+ switch (token.type) {
+ case 'single-quoted-scalar':
+ type = 'QUOTE_SINGLE';
+ break;
+ case 'double-quoted-scalar':
+ type = 'QUOTE_DOUBLE';
+ break;
+ case 'block-scalar': {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
+ break;
+ }
+ default:
+ type = 'PLAIN';
+ }
+ const source = stringifyString.stringifyString({ type, value }, {
+ implicitKey: implicitKey || indent === null,
+ indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ switch (source[0]) {
+ case '|':
+ case '>':
+ setBlockScalarValue(token, source);
+ break;
+ case '"':
+ setFlowScalarValue(token, source, 'double-quoted-scalar');
+ break;
+ case "'":
+ setFlowScalarValue(token, source, 'single-quoted-scalar');
+ break;
+ default:
+ setFlowScalarValue(token, source, 'scalar');
+ }
+}
+function setBlockScalarValue(token, source) {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ if (token.type === 'block-scalar') {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ header.source = head;
+ token.source = body;
+ }
+ else {
+ const { offset } = token;
+ const indent = 'indent' in token ? token.indent : -1;
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type: 'block-scalar', indent, props, source: body });
+ }
+}
+/** @returns `true` if last token is a newline */
+function addEndtoBlockProps(props, end) {
+ if (end)
+ for (const st of end)
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ props.push(st);
+ break;
+ case 'newline':
+ props.push(st);
+ return true;
+ }
+ return false;
+}
+function setFlowScalarValue(token, source, type) {
+ switch (token.type) {
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ token.type = type;
+ token.source = source;
+ break;
+ case 'block-scalar': {
+ const end = token.props.slice(1);
+ let oa = source.length;
+ if (token.props[0].type === 'block-scalar-header')
+ oa -= token.props[0].source.length;
+ for (const tok of end)
+ tok.offset += oa;
+ delete token.props;
+ Object.assign(token, { type, source, end });
+ break;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ const offset = token.offset + source.length;
+ const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
+ delete token.items;
+ Object.assign(token, { type, source, end: [nl] });
+ break;
+ }
+ default: {
+ const indent = 'indent' in token ? token.indent : -1;
+ const end = 'end' in token && Array.isArray(token.end)
+ ? token.end.filter(st => st.type === 'space' ||
+ st.type === 'comment' ||
+ st.type === 'newline')
+ : [];
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type, indent, source, end });
+ }
+ }
+}
+
+exports.createScalarToken = createScalarToken;
+exports.resolveAsScalar = resolveAsScalar;
+exports.setScalarValue = setScalarValue;
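
These helpers operate on raw CST tokens, which are produced by the `Parser` and re-exported under the `CST` namespace of the package root (as in yaml v2). A hedged sketch of reading and rewriting a scalar in place:

'use strict';

const { Parser, CST } = require('yaml');

const [doc] = new Parser().parse('answer: 42\n');  // first token is the document
const valueToken = doc.value.items[0].value;       // CST token for the 42 scalar

console.log(CST.resolveAsScalar(valueToken).value); // '42'

// Rewrite the scalar in place, then re-serialize the CST losslessly.
CST.setScalarValue(valueToken, 'forty-two', { afterKey: true });
console.log(CST.stringify(doc));                    // answer: forty-two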
diff --git a/node_modules/yaml/dist/parse/cst-stringify.d.ts b/node_modules/yaml/dist/parse/cst-stringify.d.ts
new file mode 100644
index 0000000..dbf66d6
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-stringify.d.ts
@@ -0,0 +1,8 @@
+import type { CollectionItem, Token } from './cst.js';
+/**
+ * Stringify a CST document, token, or collection item
+ *
+ * Fair warning: This applies no validation whatsoever, and
+ * simply concatenates the sources in their logical order.
+ */
+export declare const stringify: (cst: Token | CollectionItem) => string;
diff --git a/node_modules/yaml/dist/parse/cst-stringify.js b/node_modules/yaml/dist/parse/cst-stringify.js
new file mode 100644
index 0000000..78e8c37
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-stringify.js
@@ -0,0 +1,63 @@
+'use strict';
+
+/**
+ * Stringify a CST document, token, or collection item
+ *
+ * Fair warning: This applies no validation whatsoever, and
+ * simply concatenates the sources in their logical order.
+ */
+const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
+function stringifyToken(token) {
+ switch (token.type) {
+ case 'block-scalar': {
+ let res = '';
+ for (const tok of token.props)
+ res += stringifyToken(tok);
+ return res + token.source;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ let res = '';
+ for (const item of token.items)
+ res += stringifyItem(item);
+ return res;
+ }
+ case 'flow-collection': {
+ let res = token.start.source;
+ for (const item of token.items)
+ res += stringifyItem(item);
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ case 'document': {
+ let res = stringifyItem(token);
+ if (token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ default: {
+ let res = token.source;
+ if ('end' in token && token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ }
+}
+function stringifyItem({ start, key, sep, value }) {
+ let res = '';
+ for (const st of start)
+ res += st.source;
+ if (key)
+ res += stringifyToken(key);
+ if (sep)
+ for (const st of sep)
+ res += st.source;
+ if (value)
+ res += stringifyToken(value);
+ return res;
+}
+
+exports.stringify = stringify;
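
Because every token keeps its source text, comments and whitespace, concatenating the stringified top-level tokens reproduces the input byte for byte. A small round-trip sketch, assuming `Parser` and `CST.stringify` from the package root:

'use strict';

const { Parser, CST } = require('yaml');

const src = '# config\nname: snowjail\nports: [80, 443]\n';

let out = '';
for (const token of new Parser().parse(src)) out += CST.stringify(token);
console.log(out === src);   // true: the CST round-trips losslessly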
diff --git a/node_modules/yaml/dist/parse/cst-visit.d.ts b/node_modules/yaml/dist/parse/cst-visit.d.ts
new file mode 100644
index 0000000..71c6029
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-visit.d.ts
@@ -0,0 +1,39 @@
+import type { CollectionItem, Document } from './cst.js';
+export declare type VisitPath = readonly ['key' | 'value', number][];
+export declare type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void;
+/**
+ * Apply a visitor to a CST document or item.
+ *
+ * Walks through the tree (depth-first) starting from the root, calling a
+ * `visitor` function with two arguments when entering each item:
+ * - `item`: The current item, which includes the following members:
+ * - `start: SourceToken[]` – Source tokens before the key or value,
+ * possibly including its anchor or tag.
+ * - `key?: Token | null` – Set for pair values. May then be `null`, if
+ * the key before the `:` separator is empty.
+ * - `sep?: SourceToken[]` – Source tokens between the key and the value,
+ * which should include the `:` map value indicator if `value` is set.
+ * - `value?: Token` – The value of a sequence item, or of a map pair.
+ * - `path`: The steps from the root to the current node, as an array of
+ * `['key' | 'value', number]` tuples.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this token, continue with
+ * next sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current item, then continue with the next one
+ * - `number`: Set the index of the next step. This is useful especially if
+ * the index of the current token has changed.
+ * - `function`: Define the next visitor for this item. After the original
+ * visitor is called on item entry, next visitors are called after handling
+ * a non-empty `key` and when exiting the item.
+ */
+export declare function visit(cst: Document | CollectionItem, visitor: Visitor): void;
+export declare namespace visit {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+ var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined;
+ var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => import("./cst.js").BlockMap | import("./cst.js").BlockSequence | import("./cst.js").FlowCollection;
+}
diff --git a/node_modules/yaml/dist/parse/cst-visit.js b/node_modules/yaml/dist/parse/cst-visit.js
new file mode 100644
index 0000000..dbc47bd
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-visit.js
@@ -0,0 +1,99 @@
+'use strict';
+
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove item');
+/**
+ * Apply a visitor to a CST document or item.
+ *
+ * Walks through the tree (depth-first) starting from the root, calling a
+ * `visitor` function with two arguments when entering each item:
+ * - `item`: The current item, which includes the following members:
+ * - `start: SourceToken[]` – Source tokens before the key or value,
+ * possibly including its anchor or tag.
+ * - `key?: Token | null` – Set for pair values. May then be `null`, if
+ * the key before the `:` separator is empty.
+ * - `sep?: SourceToken[]` – Source tokens between the key and the value,
+ * which should include the `:` map value indicator if `value` is set.
+ * - `value?: Token` – The value of a sequence item, or of a map pair.
+ * - `path`: The steps from the root to the current node, as an array of
+ * `['key' | 'value', number]` tuples.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this token, continue with
+ * next sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current item, then continue with the next one
+ * - `number`: Set the index of the next step. This is useful especially if
+ * the index of the current token has changed.
+ * - `function`: Define the next visitor for this item. After the original
+ * visitor is called on item entry, next visitors are called after handling
+ * a non-empty `key` and when exiting the item.
+ */
+function visit(cst, visitor) {
+ if ('type' in cst && cst.type === 'document')
+ cst = { start: cst.start, value: cst.value };
+ _visit(Object.freeze([]), cst, visitor);
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current item */
+visit.SKIP = SKIP;
+/** Remove the current item */
+visit.REMOVE = REMOVE;
+/** Find the item at `path` from `cst` as the root */
+visit.itemAtPath = (cst, path) => {
+ let item = cst;
+ for (const [field, index] of path) {
+ const tok = item === null || item === void 0 ? void 0 : item[field];
+ if (tok && 'items' in tok) {
+ item = tok.items[index];
+ }
+ else
+ return undefined;
+ }
+ return item;
+};
+/**
+ * Get the immediate parent collection of the item at `path` from `cst` as the root.
+ *
+ * Throws an error if the collection is not found, which should never happen if the item itself exists.
+ */
+visit.parentCollection = (cst, path) => {
+ const parent = visit.itemAtPath(cst, path.slice(0, -1));
+ const field = path[path.length - 1][0];
+ const coll = parent === null || parent === void 0 ? void 0 : parent[field];
+ if (coll && 'items' in coll)
+ return coll;
+ throw new Error('Parent collection not found');
+};
+function _visit(path, item, visitor) {
+ let ctrl = visitor(item, path);
+ if (typeof ctrl === 'symbol')
+ return ctrl;
+ for (const field of ['key', 'value']) {
+ const token = item[field];
+ if (token && 'items' in token) {
+ for (let i = 0; i < token.items.length; ++i) {
+ const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ token.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ if (typeof ctrl === 'function' && field === 'key')
+ ctrl = ctrl(item, path);
+ }
+ }
+ return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
+}
+
+exports.visit = visit;
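
`CST.visit` pairs naturally with `resolveAsScalar` and `stringify` for surgical edits that leave the untouched parts of the source exactly as written. A hedged sketch that drops one key from a block map:

'use strict';

const { Parser, CST } = require('yaml');

const [doc] = new Parser().parse('keep: 1\ndrop: 2\nalso: 3\n');

// Remove every pair whose key resolves to the plain scalar "drop".
CST.visit(doc, item => {
  const key = item.key && CST.resolveAsScalar(item.key);
  if (key && key.value === 'drop') return CST.visit.REMOVE;
});

console.log(CST.stringify(doc));
// keep: 1
// also: 3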
diff --git a/node_modules/yaml/dist/parse/cst.d.ts b/node_modules/yaml/dist/parse/cst.d.ts
new file mode 100644
index 0000000..e5dd4e7
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst.d.ts
@@ -0,0 +1,106 @@
+export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
+export { stringify } from './cst-stringify.js';
+export { visit, Visitor, VisitPath } from './cst-visit.js';
+export interface SourceToken {
+ type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header';
+ offset: number;
+ indent: number;
+ source: string;
+}
+export interface ErrorToken {
+ type: 'error';
+ offset: number;
+ source: string;
+ message: string;
+}
+export interface Directive {
+ type: 'directive';
+ offset: number;
+ source: string;
+}
+export interface Document {
+ type: 'document';
+ offset: number;
+ start: SourceToken[];
+ value?: Token;
+ end?: SourceToken[];
+}
+export interface DocumentEnd {
+ type: 'doc-end';
+ offset: number;
+ source: string;
+ end?: SourceToken[];
+}
+export interface FlowScalar {
+ type: 'alias' | 'scalar' | 'single-quoted-scalar' | 'double-quoted-scalar';
+ offset: number;
+ indent: number;
+ source: string;
+ end?: SourceToken[];
+}
+export interface BlockScalar {
+ type: 'block-scalar';
+ offset: number;
+ indent: number;
+ props: Token[];
+ source: string;
+}
+export interface BlockMap {
+ type: 'block-map';
+ offset: number;
+ indent: number;
+ items: Array<{
+ start: SourceToken[];
+ key?: never;
+ sep?: never;
+ value?: never;
+ } | {
+ start: SourceToken[];
+ key: Token | null;
+ sep: SourceToken[];
+ value?: Token;
+ }>;
+}
+export interface BlockSequence {
+ type: 'block-seq';
+ offset: number;
+ indent: number;
+ items: Array<{
+ start: SourceToken[];
+ key?: never;
+ sep?: never;
+ value?: Token;
+ }>;
+}
+export declare type CollectionItem = {
+ start: SourceToken[];
+ key?: Token | null;
+ sep?: SourceToken[];
+ value?: Token;
+};
+export interface FlowCollection {
+ type: 'flow-collection';
+ offset: number;
+ indent: number;
+ start: SourceToken;
+ items: CollectionItem[];
+ end: SourceToken[];
+}
+export declare type Token = SourceToken | ErrorToken | Directive | Document | DocumentEnd | FlowScalar | BlockScalar | BlockMap | BlockSequence | FlowCollection;
+export declare type TokenType = SourceToken['type'] | DocumentEnd['type'] | FlowScalar['type'];
+/** The byte order mark */
+export declare const BOM = "\uFEFF";
+/** Start of doc-mode */
+export declare const DOCUMENT = "\u0002";
+/** Unexpected end of flow-mode */
+export declare const FLOW_END = "\u0018";
+/** Next token is a scalar value */
+export declare const SCALAR = "\u001F";
+/** @returns `true` if `token` is a flow or block collection */
+export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection;
+/** @returns `true` if `token` is a flow or block scalar; not an alias */
+export declare const isScalar: (token: Token | null | undefined) => token is BlockScalar | FlowScalar;
+/** Get a printable representation of a lexer token */
+export declare function prettyToken(token: string): string;
+/** Identify the type of a lexer token. May return `null` for unknown tokens. */
+export declare function tokenType(source: string): TokenType | null;
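
As a brief illustration of these token shapes (again assuming the top-level `Parser` and `CST` exports), the two guards narrow a `Token` to the collection or scalar interfaces declared above:

```js
const { Parser, CST } = require('yaml')

const [doc] = new Parser().parse('items:\n  - 1\n  - 2\n')
const root = doc.value // Token | undefined

if (CST.isCollection(root)) {
  // BlockMap | BlockSequence | FlowCollection: all carry an `items` array
  console.log(root.type, root.items.length) // 'block-map' 1
}
if (CST.isScalar(root)) {
  // BlockScalar | FlowScalar; aliases are deliberately excluded
  console.log(root.source)
}
```
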
diff --git a/node_modules/yaml/dist/parse/cst.js b/node_modules/yaml/dist/parse/cst.js
new file mode 100644
index 0000000..613c229
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst.js
@@ -0,0 +1,112 @@
+'use strict';
+
+var cstScalar = require('./cst-scalar.js');
+var cstStringify = require('./cst-stringify.js');
+var cstVisit = require('./cst-visit.js');
+
+/** The byte order mark */
+const BOM = '\u{FEFF}';
+/** Start of doc-mode */
+const DOCUMENT = '\x02'; // C0: Start of Text
+/** Unexpected end of flow-mode */
+const FLOW_END = '\x18'; // C0: Cancel
+/** Next token is a scalar value */
+const SCALAR = '\x1f'; // C0: Unit Separator
+/** @returns `true` if `token` is a flow or block collection */
+const isCollection = (token) => !!token && 'items' in token;
+/** @returns `true` if `token` is a flow or block scalar; not an alias */
+const isScalar = (token) => !!token &&
+ (token.type === 'scalar' ||
+ token.type === 'single-quoted-scalar' ||
+ token.type === 'double-quoted-scalar' ||
+ token.type === 'block-scalar');
+/* istanbul ignore next */
+/** Get a printable representation of a lexer token */
+function prettyToken(token) {
+ switch (token) {
+ case BOM:
+ return '<BOM>';
+ case DOCUMENT:
+ return '<DOC>';
+ case FLOW_END:
+ return '<FLOW_END>';
+ case SCALAR:
+ return '<SCALAR>';
+ default:
+ return JSON.stringify(token);
+ }
+}
+/** Identify the type of a lexer token. May return `null` for unknown tokens. */
+function tokenType(source) {
+ switch (source) {
+ case BOM:
+ return 'byte-order-mark';
+ case DOCUMENT:
+ return 'doc-mode';
+ case FLOW_END:
+ return 'flow-error-end';
+ case SCALAR:
+ return 'scalar';
+ case '---':
+ return 'doc-start';
+ case '...':
+ return 'doc-end';
+ case '':
+ case '\n':
+ case '\r\n':
+ return 'newline';
+ case '-':
+ return 'seq-item-ind';
+ case '?':
+ return 'explicit-key-ind';
+ case ':':
+ return 'map-value-ind';
+ case '{':
+ return 'flow-map-start';
+ case '}':
+ return 'flow-map-end';
+ case '[':
+ return 'flow-seq-start';
+ case ']':
+ return 'flow-seq-end';
+ case ',':
+ return 'comma';
+ }
+ switch (source[0]) {
+ case ' ':
+ case '\t':
+ return 'space';
+ case '#':
+ return 'comment';
+ case '%':
+ return 'directive-line';
+ case '*':
+ return 'alias';
+ case '&':
+ return 'anchor';
+ case '!':
+ return 'tag';
+ case "'":
+ return 'single-quoted-scalar';
+ case '"':
+ return 'double-quoted-scalar';
+ case '|':
+ case '>':
+ return 'block-scalar-header';
+ }
+ return null;
+}
+
+exports.createScalarToken = cstScalar.createScalarToken;
+exports.resolveAsScalar = cstScalar.resolveAsScalar;
+exports.setScalarValue = cstScalar.setScalarValue;
+exports.stringify = cstStringify.stringify;
+exports.visit = cstVisit.visit;
+exports.BOM = BOM;
+exports.DOCUMENT = DOCUMENT;
+exports.FLOW_END = FLOW_END;
+exports.SCALAR = SCALAR;
+exports.isCollection = isCollection;
+exports.isScalar = isScalar;
+exports.prettyToken = prettyToken;
+exports.tokenType = tokenType;
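
A short sketch of classifying raw lexer output with these helpers, assuming `Lexer` and the `CST` namespace are re-exported from the package root:

```js
const { CST, Lexer } = require('yaml')

for (const lexeme of new Lexer().lex('key: [1, 2]\n')) {
  // prettyToken() renders the control characters as <DOC>, <SCALAR>, etc.;
  // tokenType() returns null for plain-scalar slices such as 'key', because
  // those are announced by the preceding \x1f (SCALAR) control character.
  console.log(CST.prettyToken(lexeme), CST.tokenType(lexeme))
}
```
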
diff --git a/node_modules/yaml/dist/parse/lexer.d.ts b/node_modules/yaml/dist/parse/lexer.d.ts
new file mode 100644
index 0000000..238e7b5
--- /dev/null
+++ b/node_modules/yaml/dist/parse/lexer.d.ts
@@ -0,0 +1,87 @@
+/**
+ * Splits an input string into lexical tokens, i.e. smaller strings that are
+ * easily identifiable by `tokens.tokenType()`.
+ *
+ * Lexing always starts in a "stream" context. Incomplete input may be buffered
+ * Lexing always starts in a "stream" context. Incomplete input may be buffered
+ * until a complete token can be emitted.
+ *
+ * In addition to slices of the original input, the following control characters
+ * may also be emitted:
+ *
+ * - `\x02` (Start of Text): A document starts with the next token
+ * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
+ * - `\x1f` (Unit Separator): Next token is a scalar value
+ * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
+ */
+export declare class Lexer {
+ /**
+ * Flag indicating whether the end of the current buffer marks the end of
+ * all input
+ */
+ private atEnd;
+ /**
+ * Explicit indent set in block scalar header, as an offset from the current
+ * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
+ * explicitly set.
+ */
+ private blockScalarIndent;
+ /**
+ * Block scalars that include a + (keep) chomping indicator in their header
+ * include trailing empty lines, which are otherwise excluded from the
+ * scalar's contents.
+ */
+ private blockScalarKeep;
+ /** Current input */
+ private buffer;
+ /**
+ * Flag noting whether the map value indicator : can immediately follow this
+ * node within a flow context.
+ */
+ private flowKey;
+ /** Count of surrounding flow collection levels. */
+ private flowLevel;
+ /**
+ * Minimum level of indentation required for next lines to be parsed as a
+ * part of the current scalar value.
+ */
+ private indentNext;
+ /** Indentation level of the current line. */
+ private indentValue;
+ /** Position of the next \n character. */
+ private lineEndPos;
+ /** Stores the state of the lexer when reaching the end of incomplete input */
+ private next;
+ /** A pointer to `buffer`; the current position of the lexer. */
+ private pos;
+ /**
+ * Generate YAML tokens from the `source` string. If `incomplete`,
+ * a part of the last line may be left as a buffer for the next call.
+ *
+ * @returns A generator of lexical tokens
+ */
+ lex(source: string, incomplete?: boolean): Generator<string, void, unknown>;
+ private atLineEnd;
+ private charAt;
+ private continueScalar;
+ private getLine;
+ private hasChars;
+ private setNext;
+ private peek;
+ private parseNext;
+ private parseStream;
+ private parseLineStart;
+ private parseBlockStart;
+ private parseDocument;
+ private parseFlowCollection;
+ private parseQuotedScalar;
+ private parseBlockScalarHeader;
+ private parseBlockScalar;
+ private parsePlainScalar;
+ private pushCount;
+ private pushToIndex;
+ private pushIndicators;
+ private pushTag;
+ private pushNewline;
+ private pushSpaces;
+ private pushUntil;
+}
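
A minimal sketch of the incremental mode described above: with `incomplete = true` a partial trailing line is buffered rather than emitted, and a final non-incomplete call flushes it (assuming the top-level `Lexer` export).

```js
const { Lexer } = require('yaml')

const lexer = new Lexer()
const tokens = []

tokens.push(...lexer.lex('fo', true))        // nothing complete yet
tokens.push(...lexer.lex('o: bar\n', true))  // emits up to the buffered tail
tokens.push(...lexer.lex(''))                // incomplete = false: flush

console.log(tokens)
// [ '\x02', '\x1f', 'foo', ':', ' ', '\x1f', 'bar', '\n' ]
```
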
diff --git a/node_modules/yaml/dist/parse/lexer.js b/node_modules/yaml/dist/parse/lexer.js
new file mode 100644
index 0000000..855c11f
--- /dev/null
+++ b/node_modules/yaml/dist/parse/lexer.js
@@ -0,0 +1,704 @@
+'use strict';
+
+var cst = require('./cst.js');
+
+/*
+START -> stream
+
+stream
+ directive -> line-end -> stream
+ indent + line-end -> stream
+ [else] -> line-start
+
+line-end
+ comment -> line-end
+ newline -> .
+ input-end -> END
+
+line-start
+ doc-start -> doc
+ doc-end -> stream
+ [else] -> indent -> block-start
+
+block-start
+ seq-item-start -> block-start
+ explicit-key-start -> block-start
+ map-value-start -> block-start
+ [else] -> doc
+
+doc
+ line-end -> line-start
+ spaces -> doc
+ anchor -> doc
+ tag -> doc
+ flow-start -> flow -> doc
+ flow-end -> error -> doc
+ seq-item-start -> error -> doc
+ explicit-key-start -> error -> doc
+ map-value-start -> doc
+ alias -> doc
+ quote-start -> quoted-scalar -> doc
+ block-scalar-header -> line-end -> block-scalar(min) -> line-start
+ [else] -> plain-scalar(false, min) -> doc
+
+flow
+ line-end -> flow
+ spaces -> flow
+ anchor -> flow
+ tag -> flow
+ flow-start -> flow -> flow
+ flow-end -> .
+ seq-item-start -> error -> flow
+ explicit-key-start -> flow
+ map-value-start -> flow
+ alias -> flow
+ quote-start -> quoted-scalar -> flow
+ comma -> flow
+ [else] -> plain-scalar(true, 0) -> flow
+
+quoted-scalar
+ quote-end -> .
+ [else] -> quoted-scalar
+
+block-scalar(min)
+ newline + peek(indent < min) -> .
+ [else] -> block-scalar(min)
+
+plain-scalar(is-flow, min)
+ scalar-end(is-flow) -> .
+ peek(newline + (indent < min)) -> .
+ [else] -> plain-scalar(min)
+*/
+function isEmpty(ch) {
+ switch (ch) {
+ case undefined:
+ case ' ':
+ case '\n':
+ case '\r':
+ case '\t':
+ return true;
+ default:
+ return false;
+ }
+}
+const hexDigits = '0123456789ABCDEFabcdef'.split('');
+const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
+const invalidFlowScalarChars = ',[]{}'.split('');
+const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
+const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
+/**
+ * Splits an input string into lexical tokens, i.e. smaller strings that are
+ * easily identifiable by `tokens.tokenType()`.
+ *
+ * Lexing always starts in a "stream" context. Incomplete input may be buffered
+ * until a complete token can be emitted.
+ *
+ * In addition to slices of the original input, the following control characters
+ * may also be emitted:
+ *
+ * - `\x02` (Start of Text): A document starts with the next token
+ * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
+ * - `\x1f` (Unit Separator): Next token is a scalar value
+ * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
+ */
+class Lexer {
+ constructor() {
+ /**
+ * Flag indicating whether the end of the current buffer marks the end of
+ * all input
+ */
+ this.atEnd = false;
+ /**
+ * Explicit indent set in block scalar header, as an offset from the current
+ * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
+ * explicitly set.
+ */
+ this.blockScalarIndent = -1;
+ /**
+ * Block scalars that include a + (keep) chomping indicator in their header
+ * include trailing empty lines, which are otherwise excluded from the
+ * scalar's contents.
+ */
+ this.blockScalarKeep = false;
+ /** Current input */
+ this.buffer = '';
+ /**
+ * Flag noting whether the map value indicator : can immediately follow this
+ * node within a flow context.
+ */
+ this.flowKey = false;
+ /** Count of surrounding flow collection levels. */
+ this.flowLevel = 0;
+ /**
+ * Minimum level of indentation required for next lines to be parsed as a
+ * part of the current scalar value.
+ */
+ this.indentNext = 0;
+ /** Indentation level of the current line. */
+ this.indentValue = 0;
+ /** Position of the next \n character. */
+ this.lineEndPos = null;
+ /** Stores the state of the lexer when reaching the end of incomplete input */
+ this.next = null;
+ /** A pointer to `buffer`; the current position of the lexer. */
+ this.pos = 0;
+ }
+ /**
+ * Generate YAML tokens from the `source` string. If `incomplete`,
+ * a part of the last line may be left as a buffer for the next call.
+ *
+ * @returns A generator of lexical tokens
+ */
+ *lex(source, incomplete = false) {
+ var _a;
+ if (source) {
+ this.buffer = this.buffer ? this.buffer + source : source;
+ this.lineEndPos = null;
+ }
+ this.atEnd = !incomplete;
+ let next = (_a = this.next) !== null && _a !== void 0 ? _a : 'stream';
+ while (next && (incomplete || this.hasChars(1)))
+ next = yield* this.parseNext(next);
+ }
+ atLineEnd() {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[++i];
+ if (!ch || ch === '#' || ch === '\n')
+ return true;
+ if (ch === '\r')
+ return this.buffer[i + 1] === '\n';
+ return false;
+ }
+ charAt(n) {
+ return this.buffer[this.pos + n];
+ }
+ continueScalar(offset) {
+ let ch = this.buffer[offset];
+ if (this.indentNext > 0) {
+ let indent = 0;
+ while (ch === ' ')
+ ch = this.buffer[++indent + offset];
+ if (ch === '\r') {
+ const next = this.buffer[indent + offset + 1];
+ if (next === '\n' || (!next && !this.atEnd))
+ return offset + indent + 1;
+ }
+ return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
+ ? offset + indent
+ : -1;
+ }
+ if (ch === '-' || ch === '.') {
+ const dt = this.buffer.substr(offset, 3);
+ if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
+ return -1;
+ }
+ return offset;
+ }
+ getLine() {
+ let end = this.lineEndPos;
+ if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
+ end = this.buffer.indexOf('\n', this.pos);
+ this.lineEndPos = end;
+ }
+ if (end === -1)
+ return this.atEnd ? this.buffer.substring(this.pos) : null;
+ if (this.buffer[end - 1] === '\r')
+ end -= 1;
+ return this.buffer.substring(this.pos, end);
+ }
+ hasChars(n) {
+ return this.pos + n <= this.buffer.length;
+ }
+ setNext(state) {
+ this.buffer = this.buffer.substring(this.pos);
+ this.pos = 0;
+ this.lineEndPos = null;
+ this.next = state;
+ return null;
+ }
+ peek(n) {
+ return this.buffer.substr(this.pos, n);
+ }
+ *parseNext(next) {
+ switch (next) {
+ case 'stream':
+ return yield* this.parseStream();
+ case 'line-start':
+ return yield* this.parseLineStart();
+ case 'block-start':
+ return yield* this.parseBlockStart();
+ case 'doc':
+ return yield* this.parseDocument();
+ case 'flow':
+ return yield* this.parseFlowCollection();
+ case 'quoted-scalar':
+ return yield* this.parseQuotedScalar();
+ case 'block-scalar':
+ return yield* this.parseBlockScalar();
+ case 'plain-scalar':
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseStream() {
+ let line = this.getLine();
+ if (line === null)
+ return this.setNext('stream');
+ if (line[0] === cst.BOM) {
+ yield* this.pushCount(1);
+ line = line.substring(1);
+ }
+ if (line[0] === '%') {
+ let dirEnd = line.length;
+ const cs = line.indexOf('#');
+ if (cs !== -1) {
+ const ch = line[cs - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd = cs - 1;
+ }
+ while (true) {
+ const ch = line[dirEnd - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd -= 1;
+ else
+ break;
+ }
+ const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
+ yield* this.pushCount(line.length - n); // possible comment
+ this.pushNewline();
+ return 'stream';
+ }
+ if (this.atLineEnd()) {
+ const sp = yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - sp);
+ yield* this.pushNewline();
+ return 'stream';
+ }
+ yield cst.DOCUMENT;
+ return yield* this.parseLineStart();
+ }
+ *parseLineStart() {
+ const ch = this.charAt(0);
+ if (!ch && !this.atEnd)
+ return this.setNext('line-start');
+ if (ch === '-' || ch === '.') {
+ if (!this.atEnd && !this.hasChars(4))
+ return this.setNext('line-start');
+ const s = this.peek(3);
+ if (s === '---' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ this.indentValue = 0;
+ this.indentNext = 0;
+ return 'doc';
+ }
+ else if (s === '...' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ return 'stream';
+ }
+ }
+ this.indentValue = yield* this.pushSpaces(false);
+ if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
+ this.indentNext = this.indentValue;
+ return yield* this.parseBlockStart();
+ }
+ *parseBlockStart() {
+ const [ch0, ch1] = this.peek(2);
+ if (!ch1 && !this.atEnd)
+ return this.setNext('block-start');
+ if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
+ const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
+ this.indentNext = this.indentValue + 1;
+ this.indentValue += n;
+ return yield* this.parseBlockStart();
+ }
+ return 'doc';
+ }
+ *parseDocument() {
+ yield* this.pushSpaces(true);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('doc');
+ let n = yield* this.pushIndicators();
+ switch (line[n]) {
+ case '#':
+ yield* this.pushCount(line.length - n);
+ // fallthrough
+ case undefined:
+ yield* this.pushNewline();
+ return yield* this.parseLineStart();
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel = 1;
+ return 'flow';
+ case '}':
+ case ']':
+ // this is an error
+ yield* this.pushCount(1);
+ return 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'doc';
+ case '"':
+ case "'":
+ return yield* this.parseQuotedScalar();
+ case '|':
+ case '>':
+ n += yield* this.parseBlockScalarHeader();
+ n += yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - n);
+ yield* this.pushNewline();
+ return yield* this.parseBlockScalar();
+ default:
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseFlowCollection() {
+ let nl, sp;
+ let indent = -1;
+ do {
+ nl = yield* this.pushNewline();
+ if (nl > 0) {
+ sp = yield* this.pushSpaces(false);
+ this.indentValue = indent = sp;
+ }
+ else {
+ sp = 0;
+ }
+ sp += yield* this.pushSpaces(true);
+ } while (nl + sp > 0);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('flow');
+ if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
+ (indent === 0 &&
+ (line.startsWith('---') || line.startsWith('...')) &&
+ isEmpty(line[3]))) {
+ // Allowing for the terminal ] or } at the same (rather than greater)
+ // indent level as the initial [ or { is technically invalid, but
+ // failing here would be surprising to users.
+ const atFlowEndMarker = indent === this.indentNext - 1 &&
+ this.flowLevel === 1 &&
+ (line[0] === ']' || line[0] === '}');
+ if (!atFlowEndMarker) {
+ // this is an error
+ this.flowLevel = 0;
+ yield cst.FLOW_END;
+ return yield* this.parseLineStart();
+ }
+ }
+ let n = 0;
+ while (line[n] === ',') {
+ n += yield* this.pushCount(1);
+ n += yield* this.pushSpaces(true);
+ this.flowKey = false;
+ }
+ n += yield* this.pushIndicators();
+ switch (line[n]) {
+ case undefined:
+ return 'flow';
+ case '#':
+ yield* this.pushCount(line.length - n);
+ return 'flow';
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel += 1;
+ return 'flow';
+ case '}':
+ case ']':
+ yield* this.pushCount(1);
+ this.flowKey = true;
+ this.flowLevel -= 1;
+ return this.flowLevel ? 'flow' : 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'flow';
+ case '"':
+ case "'":
+ this.flowKey = true;
+ return yield* this.parseQuotedScalar();
+ case ':': {
+ const next = this.charAt(1);
+ if (this.flowKey || isEmpty(next) || next === ',') {
+ this.flowKey = false;
+ yield* this.pushCount(1);
+ yield* this.pushSpaces(true);
+ return 'flow';
+ }
+ }
+ // fallthrough
+ default:
+ this.flowKey = false;
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseQuotedScalar() {
+ const quote = this.charAt(0);
+ let end = this.buffer.indexOf(quote, this.pos + 1);
+ if (quote === "'") {
+ while (end !== -1 && this.buffer[end + 1] === "'")
+ end = this.buffer.indexOf("'", end + 2);
+ }
+ else {
+ // double-quote
+ while (end !== -1) {
+ let n = 0;
+ while (this.buffer[end - 1 - n] === '\\')
+ n += 1;
+ if (n % 2 === 0)
+ break;
+ end = this.buffer.indexOf('"', end + 1);
+ }
+ }
+ // Only looking for newlines within the quotes
+ const qb = this.buffer.substring(0, end);
+ let nl = qb.indexOf('\n', this.pos);
+ if (nl !== -1) {
+ while (nl !== -1) {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = qb.indexOf('\n', cs);
+ }
+ if (nl !== -1) {
+ // this is an error caused by an unexpected unindent
+ end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
+ }
+ }
+ if (end === -1) {
+ if (!this.atEnd)
+ return this.setNext('quoted-scalar');
+ end = this.buffer.length;
+ }
+ yield* this.pushToIndex(end + 1, false);
+ return this.flowLevel ? 'flow' : 'doc';
+ }
+ *parseBlockScalarHeader() {
+ this.blockScalarIndent = -1;
+ this.blockScalarKeep = false;
+ let i = this.pos;
+ while (true) {
+ const ch = this.buffer[++i];
+ if (ch === '+')
+ this.blockScalarKeep = true;
+ else if (ch > '0' && ch <= '9')
+ this.blockScalarIndent = Number(ch) - 1;
+ else if (ch !== '-')
+ break;
+ }
+ return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
+ }
+ *parseBlockScalar() {
+ let nl = this.pos - 1; // may be -1 if this.pos === 0
+ let indent = 0;
+ let ch;
+ loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
+ switch (ch) {
+ case ' ':
+ indent += 1;
+ break;
+ case '\n':
+ nl = i;
+ indent = 0;
+ break;
+ case '\r': {
+ const next = this.buffer[i + 1];
+ if (!next && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (next === '\n')
+ break;
+ } // fallthrough
+ default:
+ break loop;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (indent >= this.indentNext) {
+ if (this.blockScalarIndent === -1)
+ this.indentNext = indent;
+ else
+ this.indentNext += this.blockScalarIndent;
+ do {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = this.buffer.indexOf('\n', cs);
+ } while (nl !== -1);
+ if (nl === -1) {
+ if (!this.atEnd)
+ return this.setNext('block-scalar');
+ nl = this.buffer.length;
+ }
+ }
+ if (!this.blockScalarKeep) {
+ do {
+ let i = nl - 1;
+ let ch = this.buffer[i];
+ if (ch === '\r')
+ ch = this.buffer[--i];
+ const lastChar = i; // Drop the line if last char not more indented
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[--i];
+ if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
+ nl = i;
+ else
+ break;
+ } while (true);
+ }
+ yield cst.SCALAR;
+ yield* this.pushToIndex(nl + 1, true);
+ return yield* this.parseLineStart();
+ }
+ *parsePlainScalar() {
+ const inFlow = this.flowLevel > 0;
+ let end = this.pos - 1;
+ let i = this.pos - 1;
+ let ch;
+ while ((ch = this.buffer[++i])) {
+ if (ch === ':') {
+ const next = this.buffer[i + 1];
+ if (isEmpty(next) || (inFlow && next === ','))
+ break;
+ end = i;
+ }
+ else if (isEmpty(ch)) {
+ let next = this.buffer[i + 1];
+ if (ch === '\r') {
+ if (next === '\n') {
+ i += 1;
+ ch = '\n';
+ next = this.buffer[i + 1];
+ }
+ else
+ end = i;
+ }
+ if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
+ break;
+ if (ch === '\n') {
+ const cs = this.continueScalar(i + 1);
+ if (cs === -1)
+ break;
+ i = Math.max(i, cs - 2); // to advance, but still account for ' #'
+ }
+ }
+ else {
+ if (inFlow && invalidFlowScalarChars.includes(ch))
+ break;
+ end = i;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('plain-scalar');
+ yield cst.SCALAR;
+ yield* this.pushToIndex(end + 1, true);
+ return inFlow ? 'flow' : 'doc';
+ }
+ *pushCount(n) {
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos += n;
+ return n;
+ }
+ return 0;
+ }
+ *pushToIndex(i, allowEmpty) {
+ const s = this.buffer.slice(this.pos, i);
+ if (s) {
+ yield s;
+ this.pos += s.length;
+ return s.length;
+ }
+ else if (allowEmpty)
+ yield '';
+ return 0;
+ }
+ *pushIndicators() {
+ switch (this.charAt(0)) {
+ case '!':
+ return ((yield* this.pushTag()) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '&':
+ return ((yield* this.pushUntil(isNotAnchorChar)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '-': // this is an error
+ case '?': // this is an error outside flow collections
+ case ':': {
+ const inFlow = this.flowLevel > 0;
+ const ch1 = this.charAt(1);
+ if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
+ if (!inFlow)
+ this.indentNext = this.indentValue + 1;
+ else if (this.flowKey)
+ this.flowKey = false;
+ return ((yield* this.pushCount(1)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ }
+ }
+ }
+ return 0;
+ }
+ *pushTag() {
+ if (this.charAt(1) === '<') {
+ let i = this.pos + 2;
+ let ch = this.buffer[i];
+ while (!isEmpty(ch) && ch !== '>')
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
+ }
+ else {
+ let i = this.pos + 1;
+ let ch = this.buffer[i];
+ while (ch) {
+ if (tagChars.includes(ch))
+ ch = this.buffer[++i];
+ else if (ch === '%' &&
+ hexDigits.includes(this.buffer[i + 1]) &&
+ hexDigits.includes(this.buffer[i + 2])) {
+ ch = this.buffer[(i += 3)];
+ }
+ else
+ break;
+ }
+ return yield* this.pushToIndex(i, false);
+ }
+ }
+ *pushNewline() {
+ const ch = this.buffer[this.pos];
+ if (ch === '\n')
+ return yield* this.pushCount(1);
+ else if (ch === '\r' && this.charAt(1) === '\n')
+ return yield* this.pushCount(2);
+ else
+ return 0;
+ }
+ *pushSpaces(allowTabs) {
+ let i = this.pos - 1;
+ let ch;
+ do {
+ ch = this.buffer[++i];
+ } while (ch === ' ' || (allowTabs && ch === '\t'));
+ const n = i - this.pos;
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos = i;
+ }
+ return n;
+ }
+ *pushUntil(test) {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (!test(ch))
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(i, false);
+ }
+}
+
+exports.Lexer = Lexer;
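
One property worth noting, shown here as a sketch rather than shipped behaviour: apart from the injected control characters, every emitted lexeme is a verbatim slice of the input, so filtering those characters out reconstructs the source.

```js
const { Lexer } = require('yaml')

const src = 'foo:\n  - "bar"\n  - baz # comment\n'
const lexemes = Array.from(new Lexer().lex(src))

// DOCUMENT (\x02), FLOW_END (\x18) and SCALAR (\x1f) are the only tokens
// that are not taken from the input buffer itself.
const ctrl = new Set(['\x02', '\x18', '\x1f'])
console.log(lexemes.filter(t => !ctrl.has(t)).join('') === src) // true
```
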
diff --git a/node_modules/yaml/dist/parse/line-counter.d.ts b/node_modules/yaml/dist/parse/line-counter.d.ts
new file mode 100644
index 0000000..b469095
--- /dev/null
+++ b/node_modules/yaml/dist/parse/line-counter.d.ts
@@ -0,0 +1,22 @@
+/**
+ * Tracks newlines during parsing in order to provide an efficient API for
+ * determining the one-indexed `{ line, col }` position for any offset
+ * within the input.
+ */
+export declare class LineCounter {
+ lineStarts: number[];
+ /**
+ * Should be called in ascending order. Otherwise, call
+ * `lineCounter.lineStarts.sort()` before calling `linePos()`.
+ */
+ addNewLine: (offset: number) => number;
+ /**
+ * Performs a binary search and returns the 1-indexed { line, col }
+ * position of `offset`. If `line === 0`, `addNewLine` has never been
+ * called or `offset` is before the first known newline.
+ */
+ linePos: (offset: number) => {
+ line: number;
+ col: number;
+ };
+}
diff --git a/node_modules/yaml/dist/parse/line-counter.js b/node_modules/yaml/dist/parse/line-counter.js
new file mode 100644
index 0000000..0e7383b
--- /dev/null
+++ b/node_modules/yaml/dist/parse/line-counter.js
@@ -0,0 +1,41 @@
+'use strict';
+
+/**
+ * Tracks newlines during parsing in order to provide an efficient API for
+ * determining the one-indexed `{ line, col }` position for any offset
+ * within the input.
+ */
+class LineCounter {
+ constructor() {
+ this.lineStarts = [];
+ /**
+ * Should be called in ascending order. Otherwise, call
+ * `lineCounter.lineStarts.sort()` before calling `linePos()`.
+ */
+ this.addNewLine = (offset) => this.lineStarts.push(offset);
+ /**
+ * Performs a binary search and returns the 1-indexed { line, col }
+ * position of `offset`. If `line === 0`, `addNewLine` has never been
+ * called or `offset` is before the first known newline.
+ */
+ this.linePos = (offset) => {
+ let low = 0;
+ let high = this.lineStarts.length;
+ while (low < high) {
+ const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
+ if (this.lineStarts[mid] < offset)
+ low = mid + 1;
+ else
+ high = mid;
+ }
+ if (this.lineStarts[low] === offset)
+ return { line: low + 1, col: 1 };
+ if (low === 0)
+ return { line: 0, col: offset };
+ const start = this.lineStarts[low - 1];
+ return { line: low, col: offset - start + 1 };
+ };
+ }
+}
+
+exports.LineCounter = LineCounter;
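
A sketch of the intended pairing with the parser: `addNewLine` is passed as the parser's `onNewLine` callback, after which `linePos()` maps any offset back to a one-indexed position (assuming the top-level `LineCounter` and `Parser` exports).

```js
const { LineCounter, Parser } = require('yaml')

const src = 'a: 1\nb:\n  c: 2\n'
const lineCounter = new LineCounter()

for (const _token of new Parser(lineCounter.addNewLine).parse(src)) {
  // consuming the stream records every newline offset as a side effect
}

console.log(lineCounter.linePos(10)) // { line: 3, col: 3 } -> the 'c' in '  c: 2'
```

The public `parseDocument()` and `parseAllDocuments()` accept the same counter via their `lineCounter` option, which is how the pretty-error handling below wires it up.
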
diff --git a/node_modules/yaml/dist/parse/parser.d.ts b/node_modules/yaml/dist/parse/parser.d.ts
new file mode 100644
index 0000000..8f3159f
--- /dev/null
+++ b/node_modules/yaml/dist/parse/parser.d.ts
@@ -0,0 +1,84 @@
+import { Token } from './cst.js';
+/**
+ * A YAML concrete syntax tree (CST) parser
+ *
+ * ```ts
+ * const src: string = ...
+ * for (const token of new Parser().parse(src)) {
+ * // token: Token
+ * }
+ * ```
+ *
+ * To use the parser with a user-provided lexer:
+ *
+ * ```ts
+ * function* parse(source: string, lexer: Lexer) {
+ * const parser = new Parser()
+ * for (const lexeme of lexer.lex(source))
+ * yield* parser.next(lexeme)
+ * yield* parser.end()
+ * }
+ *
+ * const src: string = ...
+ * const lexer = new Lexer()
+ * for (const token of parse(src, lexer)) {
+ * // token: Token
+ * }
+ * ```
+ */
+export declare class Parser {
+ private onNewLine?;
+ /** If true, space and sequence indicators count as indentation */
+ private atNewLine;
+ /** If true, next token is a scalar value */
+ private atScalar;
+ /** Current indentation level */
+ private indent;
+ /** Current offset since the start of parsing */
+ offset: number;
+ /** On the same line with a block map key */
+ private onKeyLine;
+ /** Top indicates the node that's currently being built */
+ stack: Token[];
+ /** The source of the current token, set in parse() */
+ private source;
+ /** The type of the current token, set in parse() */
+ private type;
+ /**
+ * @param onNewLine - If defined, called separately with the start position of
+ * each new line (in `parse()`, including the start of input).
+ */
+ constructor(onNewLine?: (offset: number) => void);
+ /**
+ * Parse `source` as a YAML stream.
+ * If `incomplete`, a part of the last line may be left as a buffer for the next call.
+ *
+ * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
+ *
+ * @returns A generator of tokens representing each directive, document, and other structure.
+ */
+ parse(source: string, incomplete?: boolean): Generator<Token, void, unknown>;
+ /**
+ * Advance the parser by the `source` of one lexical token.
+ */
+ next(source: string): Generator<Token, void, unknown>;
+ private lexer;
+ /** Call at end of input to push out any remaining constructions */
+ end(): Generator<Token, void, unknown>;
+ private get sourceToken();
+ private step;
+ private peek;
+ private pop;
+ private stream;
+ private document;
+ private scalar;
+ private blockScalar;
+ private blockMap;
+ private blockSequence;
+ private flowCollection;
+ private flowScalar;
+ private startBlockValue;
+ private atIndentedComment;
+ private documentEnd;
+ private lineEnd;
+}
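
In addition to the single-shot usage shown in the comment above, the `incomplete` flag allows chunked input; a rough sketch, assuming the top-level `Parser` export:

```js
const { Parser } = require('yaml')

const parser = new Parser()
const tokens = []

for (const chunk of ['key: va', 'lue\nother: 1\n'])
  tokens.push(...parser.parse(chunk, true)) // buffer incomplete trailing lines

// A final non-incomplete call flushes the buffered tail and closes any
// structures still being built, yielding the finished top-level tokens.
tokens.push(...parser.parse('', false))

console.log(tokens.map(t => t.type)) // [ 'document' ]
```
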
diff --git a/node_modules/yaml/dist/parse/parser.js b/node_modules/yaml/dist/parse/parser.js
new file mode 100644
index 0000000..820f00a
--- /dev/null
+++ b/node_modules/yaml/dist/parse/parser.js
@@ -0,0 +1,956 @@
+'use strict';
+
+var cst = require('./cst.js');
+var lexer = require('./lexer.js');
+
+function includesToken(list, type) {
+ for (let i = 0; i < list.length; ++i)
+ if (list[i].type === type)
+ return true;
+ return false;
+}
+function includesNonEmpty(list) {
+ for (let i = 0; i < list.length; ++i) {
+ switch (list[i].type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ break;
+ default:
+ return true;
+ }
+ }
+ return false;
+}
+function isFlowToken(token) {
+ switch (token === null || token === void 0 ? void 0 : token.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'flow-collection':
+ return true;
+ default:
+ return false;
+ }
+}
+function getPrevProps(parent) {
+ var _a;
+ switch (parent.type) {
+ case 'document':
+ return parent.start;
+ case 'block-map': {
+ const it = parent.items[parent.items.length - 1];
+ return (_a = it.sep) !== null && _a !== void 0 ? _a : it.start;
+ }
+ case 'block-seq':
+ return parent.items[parent.items.length - 1].start;
+ /* istanbul ignore next should not happen */
+ default:
+ return [];
+ }
+}
+/** Note: May modify input array */
+function getFirstKeyStartProps(prev) {
+ var _a;
+ if (prev.length === 0)
+ return [];
+ let i = prev.length;
+ loop: while (--i >= 0) {
+ switch (prev[i].type) {
+ case 'doc-start':
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ case 'newline':
+ break loop;
+ }
+ }
+ while (((_a = prev[++i]) === null || _a === void 0 ? void 0 : _a.type) === 'space') {
+ /* loop */
+ }
+ return prev.splice(i, prev.length);
+}
+function fixFlowSeqItems(fc) {
+ if (fc.start.type === 'flow-seq-start') {
+ for (const it of fc.items) {
+ if (it.sep &&
+ !it.value &&
+ !includesToken(it.start, 'explicit-key-ind') &&
+ !includesToken(it.sep, 'map-value-ind')) {
+ if (it.key)
+ it.value = it.key;
+ delete it.key;
+ if (isFlowToken(it.value)) {
+ if (it.value.end)
+ Array.prototype.push.apply(it.value.end, it.sep);
+ else
+ it.value.end = it.sep;
+ }
+ else
+ Array.prototype.push.apply(it.start, it.sep);
+ delete it.sep;
+ }
+ }
+ }
+}
+/**
+ * A YAML concrete syntax tree (CST) parser
+ *
+ * ```ts
+ * const src: string = ...
+ * for (const token of new Parser().parse(src)) {
+ * // token: Token
+ * }
+ * ```
+ *
+ * To use the parser with a user-provided lexer:
+ *
+ * ```ts
+ * function* parse(source: string, lexer: Lexer) {
+ * const parser = new Parser()
+ * for (const lexeme of lexer.lex(source))
+ * yield* parser.next(lexeme)
+ * yield* parser.end()
+ * }
+ *
+ * const src: string = ...
+ * const lexer = new Lexer()
+ * for (const token of parse(src, lexer)) {
+ * // token: Token
+ * }
+ * ```
+ */
+class Parser {
+ /**
+ * @param onNewLine - If defined, called separately with the start position of
+ * each new line (in `parse()`, including the start of input).
+ */
+ constructor(onNewLine) {
+ /** If true, space and sequence indicators count as indentation */
+ this.atNewLine = true;
+ /** If true, next token is a scalar value */
+ this.atScalar = false;
+ /** Current indentation level */
+ this.indent = 0;
+ /** Current offset since the start of parsing */
+ this.offset = 0;
+ /** On the same line with a block map key */
+ this.onKeyLine = false;
+ /** Top indicates the node that's currently being built */
+ this.stack = [];
+ /** The source of the current token, set in parse() */
+ this.source = '';
+ /** The type of the current token, set in parse() */
+ this.type = '';
+ // Must be defined after `next()`
+ this.lexer = new lexer.Lexer();
+ this.onNewLine = onNewLine;
+ }
+ /**
+ * Parse `source` as a YAML stream.
+ * If `incomplete`, a part of the last line may be left as a buffer for the next call.
+ *
+ * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
+ *
+ * @returns A generator of tokens representing each directive, document, and other structure.
+ */
+ *parse(source, incomplete = false) {
+ if (this.onNewLine && this.offset === 0)
+ this.onNewLine(0);
+ for (const lexeme of this.lexer.lex(source, incomplete))
+ yield* this.next(lexeme);
+ if (!incomplete)
+ yield* this.end();
+ }
+ /**
+ * Advance the parser by the `source` of one lexical token.
+ */
+ *next(source) {
+ this.source = source;
+ if (process.env.LOG_TOKENS)
+ console.log('|', cst.prettyToken(source));
+ if (this.atScalar) {
+ this.atScalar = false;
+ yield* this.step();
+ this.offset += source.length;
+ return;
+ }
+ const type = cst.tokenType(source);
+ if (!type) {
+ const message = `Not a YAML token: ${source}`;
+ yield* this.pop({ type: 'error', offset: this.offset, message, source });
+ this.offset += source.length;
+ }
+ else if (type === 'scalar') {
+ this.atNewLine = false;
+ this.atScalar = true;
+ this.type = 'scalar';
+ }
+ else {
+ this.type = type;
+ yield* this.step();
+ switch (type) {
+ case 'newline':
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine)
+ this.onNewLine(this.offset + source.length);
+ break;
+ case 'space':
+ if (this.atNewLine && source[0] === ' ')
+ this.indent += source.length;
+ break;
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ if (this.atNewLine)
+ this.indent += source.length;
+ break;
+ case 'doc-mode':
+ case 'flow-error-end':
+ return;
+ default:
+ this.atNewLine = false;
+ }
+ this.offset += source.length;
+ }
+ }
+ /** Call at end of input to push out any remaining constructions */
+ *end() {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ }
+ get sourceToken() {
+ const st = {
+ type: this.type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ return st;
+ }
+ *step() {
+ const top = this.peek(1);
+ if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ this.stack.push({
+ type: 'doc-end',
+ offset: this.offset,
+ source: this.source
+ });
+ return;
+ }
+ if (!top)
+ return yield* this.stream();
+ switch (top.type) {
+ case 'document':
+ return yield* this.document(top);
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return yield* this.scalar(top);
+ case 'block-scalar':
+ return yield* this.blockScalar(top);
+ case 'block-map':
+ return yield* this.blockMap(top);
+ case 'block-seq':
+ return yield* this.blockSequence(top);
+ case 'flow-collection':
+ return yield* this.flowCollection(top);
+ case 'doc-end':
+ return yield* this.documentEnd(top);
+ }
+ /* istanbul ignore next should not happen */
+ yield* this.pop();
+ }
+ peek(n) {
+ return this.stack[this.stack.length - n];
+ }
+ *pop(error) {
+ const token = error !== null && error !== void 0 ? error : this.stack.pop();
+ /* istanbul ignore if should not happen */
+ if (!token) {
+ const message = 'Tried to pop an empty stack';
+ yield { type: 'error', offset: this.offset, source: '', message };
+ }
+ else if (this.stack.length === 0) {
+ yield token;
+ }
+ else {
+ const top = this.peek(1);
+ if (token.type === 'block-scalar') {
+ // Block scalars use their parent rather than header indent
+ token.indent = 'indent' in top ? top.indent : 0;
+ }
+ else if (token.type === 'flow-collection' && top.type === 'document') {
+ // Ignore all indent for top-level flow collections
+ token.indent = 0;
+ }
+ if (token.type === 'flow-collection')
+ fixFlowSeqItems(token);
+ switch (top.type) {
+ case 'document':
+ top.value = token;
+ break;
+ case 'block-scalar':
+ top.props.push(token); // error
+ break;
+ case 'block-map': {
+ const it = top.items[top.items.length - 1];
+ if (it.value) {
+ top.items.push({ start: [], key: token, sep: [] });
+ this.onKeyLine = true;
+ return;
+ }
+ else if (it.sep) {
+ it.value = token;
+ }
+ else {
+ Object.assign(it, { key: token, sep: [] });
+ this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
+ return;
+ }
+ break;
+ }
+ case 'block-seq': {
+ const it = top.items[top.items.length - 1];
+ if (it.value)
+ top.items.push({ start: [], value: token });
+ else
+ it.value = token;
+ break;
+ }
+ case 'flow-collection': {
+ const it = top.items[top.items.length - 1];
+ if (!it || it.value)
+ top.items.push({ start: [], key: token, sep: [] });
+ else if (it.sep)
+ it.value = token;
+ else
+ Object.assign(it, { key: token, sep: [] });
+ return;
+ }
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.pop(token);
+ }
+ if ((top.type === 'document' ||
+ top.type === 'block-map' ||
+ top.type === 'block-seq') &&
+ (token.type === 'block-map' || token.type === 'block-seq')) {
+ const last = token.items[token.items.length - 1];
+ if (last &&
+ !last.sep &&
+ !last.value &&
+ last.start.length > 0 &&
+ !includesNonEmpty(last.start) &&
+ (token.indent === 0 ||
+ last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
+ if (top.type === 'document')
+ top.end = last.start;
+ else
+ top.items.push({ start: last.start });
+ token.items.splice(-1, 1);
+ }
+ }
+ }
+ }
+ *stream() {
+ switch (this.type) {
+ case 'directive-line':
+ yield { type: 'directive', offset: this.offset, source: this.source };
+ return;
+ case 'byte-order-mark':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ yield this.sourceToken;
+ return;
+ case 'doc-mode':
+ case 'doc-start': {
+ const doc = {
+ type: 'document',
+ offset: this.offset,
+ start: []
+ };
+ if (this.type === 'doc-start')
+ doc.start.push(this.sourceToken);
+ this.stack.push(doc);
+ return;
+ }
+ }
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML stream`,
+ source: this.source
+ };
+ }
+ *document(doc) {
+ if (doc.value)
+ return yield* this.lineEnd(doc);
+ switch (this.type) {
+ case 'doc-start': {
+ if (includesNonEmpty(doc.start)) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ case 'anchor':
+ case 'tag':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(doc);
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML document`,
+ source: this.source
+ };
+ }
+ }
+ *scalar(scalar) {
+ if (this.type === 'map-value-ind') {
+ const prev = getPrevProps(this.peek(2));
+ const start = getFirstKeyStartProps(prev);
+ let sep;
+ if (scalar.end) {
+ sep = scalar.end;
+ sep.push(this.sourceToken);
+ delete scalar.end;
+ }
+ else
+ sep = [this.sourceToken];
+ const map = {
+ type: 'block-map',
+ offset: scalar.offset,
+ indent: scalar.indent,
+ items: [{ start, key: scalar, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else
+ yield* this.lineEnd(scalar);
+ }
+ *blockScalar(scalar) {
+ switch (this.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ scalar.props.push(this.sourceToken);
+ return;
+ case 'scalar':
+ scalar.source = this.source;
+ // block-scalar source includes trailing newline
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ yield* this.pop();
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ *blockMap(map) {
+ var _a;
+ const it = map.items[map.items.length - 1];
+ // it.sep is true-ish if pair already has key or : separator
+ switch (this.type) {
+ case 'newline':
+ this.onKeyLine = false;
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ map.items.push({ start: [this.sourceToken] });
+ }
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value)
+ map.items.push({ start: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else {
+ if (this.atIndentedComment(it.start, map.indent)) {
+ const prev = map.items[map.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ map.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ }
+ if (this.indent >= map.indent) {
+ const atNextItem = !this.onKeyLine &&
+ this.indent === map.indent &&
+ (it.sep || includesNonEmpty(it.start));
+ // For empty nodes, assign newline-separated, non-indented empty tokens to the following node
+ let start = [];
+ if (atNextItem && it.sep && !it.value) {
+ const nl = [];
+ for (let i = 0; i < it.sep.length; ++i) {
+ const st = it.sep[i];
+ switch (st.type) {
+ case 'newline':
+ nl.push(i);
+ break;
+ case 'space':
+ break;
+ case 'comment':
+ if (st.indent > map.indent)
+ nl.length = 0;
+ break;
+ default:
+ nl.length = 0;
+ }
+ }
+ if (nl.length >= 2)
+ start = it.sep.splice(nl[1]);
+ }
+ switch (this.type) {
+ case 'anchor':
+ case 'tag':
+ if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ it.sep.push(this.sourceToken);
+ }
+ else {
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'explicit-key-ind':
+ if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
+ it.start.push(this.sourceToken);
+ }
+ else if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ }
+ else {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ });
+ }
+ this.onKeyLine = true;
+ return;
+ case 'map-value-ind':
+ if (includesToken(it.start, 'explicit-key-ind')) {
+ if (!it.sep) {
+ if (includesToken(it.start, 'newline')) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else {
+ const start = getFirstKeyStartProps(it.start);
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ }
+ else if (it.value) {
+ map.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else if (isFlowToken(it.key) &&
+ !includesToken(it.sep, 'newline')) {
+ const start = getFirstKeyStartProps(it.start);
+ const key = it.key;
+ const sep = it.sep;
+ sep.push(this.sourceToken);
+ // @ts-expect-error type guard is wrong here
+ delete it.key, delete it.sep;
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key, sep }]
+ });
+ }
+ else if (start.length > 0) {
+ // Not actually at next item
+ it.sep = it.sep.concat(start, this.sourceToken);
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ else {
+ if (!it.sep) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else if (it.value || atNextItem) {
+ map.items.push({ start, key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [], key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ this.onKeyLine = true;
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (atNextItem || it.value) {
+ map.items.push({ start, key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ this.stack.push(fs);
+ }
+ else {
+ Object.assign(it, { key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ return;
+ }
+ default: {
+ const bv = this.startBlockValue(map);
+ if (bv) {
+ if (atNextItem &&
+ bv.type !== 'block-seq' &&
+ includesToken(it.start, 'explicit-key-ind')) {
+ map.items.push({ start });
+ }
+ this.stack.push(bv);
+ return;
+ }
+ }
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *blockSequence(seq) {
+ var _a;
+ const it = seq.items[seq.items.length - 1];
+ switch (this.type) {
+ case 'newline':
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ seq.items.push({ start: [this.sourceToken] });
+ }
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value)
+ seq.items.push({ start: [this.sourceToken] });
+ else {
+ if (this.atIndentedComment(it.start, seq.indent)) {
+ const prev = seq.items[seq.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ seq.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'anchor':
+ case 'tag':
+ if (it.value || this.indent <= seq.indent)
+ break;
+ it.start.push(this.sourceToken);
+ return;
+ case 'seq-item-ind':
+ if (this.indent !== seq.indent)
+ break;
+ if (it.value || includesToken(it.start, 'seq-item-ind'))
+ seq.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ }
+ if (this.indent > seq.indent) {
+ const bv = this.startBlockValue(seq);
+ if (bv) {
+ this.stack.push(bv);
+ return;
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *flowCollection(fc) {
+ const it = fc.items[fc.items.length - 1];
+ if (this.type === 'flow-error-end') {
+ let top;
+ do {
+ yield* this.pop();
+ top = this.peek(1);
+ } while (top && top.type === 'flow-collection');
+ }
+ else if (fc.end.length === 0) {
+ switch (this.type) {
+ case 'comma':
+ case 'explicit-key-ind':
+ if (!it || it.sep)
+ fc.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'map-value-ind':
+ if (!it || it.value)
+ fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ return;
+ case 'space':
+ case 'comment':
+ case 'newline':
+ case 'anchor':
+ case 'tag':
+ if (!it || it.value)
+ fc.items.push({ start: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (!it || it.value)
+ fc.items.push({ start: [], key: fs, sep: [] });
+ else if (it.sep)
+ this.stack.push(fs);
+ else
+ Object.assign(it, { key: fs, sep: [] });
+ return;
+ }
+ case 'flow-map-end':
+ case 'flow-seq-end':
+ fc.end.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(fc);
+ /* istanbul ignore else should not happen */
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ else {
+ const parent = this.peek(2);
+ if (parent.type === 'block-map' &&
+ ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
+ (this.type === 'newline' &&
+ !parent.items[parent.items.length - 1].sep))) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else if (this.type === 'map-value-ind' &&
+ parent.type !== 'flow-collection') {
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ fixFlowSeqItems(fc);
+ const sep = fc.end.splice(1, fc.end.length);
+ sep.push(this.sourceToken);
+ const map = {
+ type: 'block-map',
+ offset: fc.offset,
+ indent: fc.indent,
+ items: [{ start, key: fc, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else {
+ yield* this.lineEnd(fc);
+ }
+ }
+ }
+ flowScalar(type) {
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ return {
+ type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ }
+ startBlockValue(parent) {
+ switch (this.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return this.flowScalar(this.type);
+ case 'block-scalar-header':
+ return {
+ type: 'block-scalar',
+ offset: this.offset,
+ indent: this.indent,
+ props: [this.sourceToken],
+ source: ''
+ };
+ case 'flow-map-start':
+ case 'flow-seq-start':
+ return {
+ type: 'flow-collection',
+ offset: this.offset,
+ indent: this.indent,
+ start: this.sourceToken,
+ items: [],
+ end: []
+ };
+ case 'seq-item-ind':
+ return {
+ type: 'block-seq',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ };
+ case 'explicit-key-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ start.push(this.sourceToken);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start }]
+ };
+ }
+ case 'map-value-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ };
+ }
+ }
+ return null;
+ }
+ atIndentedComment(start, indent) {
+ if (this.type !== 'comment')
+ return false;
+ if (this.indent <= indent)
+ return false;
+ return start.every(st => st.type === 'newline' || st.type === 'space');
+ }
+ *documentEnd(docEnd) {
+ if (this.type !== 'doc-mode') {
+ if (docEnd.end)
+ docEnd.end.push(this.sourceToken);
+ else
+ docEnd.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+ *lineEnd(token) {
+ switch (this.type) {
+ case 'comma':
+ case 'doc-start':
+ case 'doc-end':
+ case 'flow-seq-end':
+ case 'flow-map-end':
+ case 'map-value-ind':
+ yield* this.pop();
+ yield* this.step();
+ break;
+ case 'newline':
+ this.onKeyLine = false;
+ // fallthrough
+ case 'space':
+ case 'comment':
+ default:
+ // all other values are errors
+ if (token.end)
+ token.end.push(this.sourceToken);
+ else
+ token.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+}
+
+exports.Parser = Parser;
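
A small sketch of what the parser emits for a short document, and of round-tripping the CST back to text with the `stringify` helper re-exported from `cst.js` above (assumed here to be available as `CST.stringify` from the package root):

```js
const { Parser, CST } = require('yaml')

for (const token of new Parser().parse('- a\n- {b: 1}\n')) {
  if (token.type === 'document') {
    // The document's value is a block-seq whose second item holds a
    // flow-collection; the CST keeps every source character, so
    // stringifying it reproduces the input exactly.
    console.log(token.value.type)     // 'block-seq'
    console.log(CST.stringify(token)) // '- a\n- {b: 1}\n'
  }
}
```
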
diff --git a/node_modules/yaml/dist/public-api.d.ts b/node_modules/yaml/dist/public-api.d.ts
new file mode 100644
index 0000000..82a4d4f
--- /dev/null
+++ b/node_modules/yaml/dist/public-api.d.ts
@@ -0,0 +1,43 @@
+import { Composer } from './compose/composer.js';
+import type { Reviver } from './doc/applyReviver.js';
+import { Document, Replacer } from './doc/Document.js';
+import type { ParsedNode } from './nodes/Node.js';
+import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js';
+export interface EmptyStream extends Array<Document.Parsed>, ReturnType<Composer['streamInfo']> {
+ empty: true;
+}
+/**
+ * Parse the input as a stream of YAML documents.
+ *
+ * Documents should be separated from each other by `...` or `---` marker lines.
+ *
+ * @returns If an empty `docs` array is returned, it will be of type
+ * EmptyStream and contain additional stream information. In
+ * TypeScript, you should use `'empty' in docs` as a type guard for it.
+ */
+export declare function parseAllDocuments<T extends ParsedNode = ParsedNode>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Document.Parsed<T>[] | EmptyStream;
+/** Parse an input string into a single YAML.Document */
+export declare function parseDocument<T extends ParsedNode = ParsedNode>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Document.Parsed<T>;
+/**
+ * Parse an input string into JavaScript.
+ *
+ * Only supports input consisting of a single YAML document; for multi-document
+ * support you should use `YAML.parseAllDocuments`. May throw on error, and may
+ * log warnings using `console.warn`.
+ *
+ * @param src - A string with YAML formatting.
+ * @param reviver - A reviver function, as in `JSON.parse()`
+ * @returns The value will match the type of the root value of the parsed YAML
+ * document, so Maps become objects, Sequences arrays, and scalars result in
+ * nulls, booleans, numbers and strings.
+ */
+export declare function parse(src: string, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
+export declare function parse(src: string, reviver: Reviver, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
+/**
+ * Stringify a value as a YAML document.
+ *
+ * @param replacer - A replacer array or function, as in `JSON.stringify()`
+ * @returns Will always include `\n` as the last character, as is expected of YAML documents.
+ */
+export declare function stringify(value: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions): string;
+export declare function stringify(value: any, replacer?: Replacer | null, options?: string | number | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions)): string;
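
A sketch of the `EmptyStream` type guard suggested in the comment above, assuming `parseAllDocuments` and `Document#toJS()` from the package root:

```js
const { parseAllDocuments } = require('yaml')

const docs = parseAllDocuments('a: 1\n---\nb: 2\n')

if ('empty' in docs) {
  // EmptyStream: no documents, but stream-level info is still available
  console.log(docs.errors, docs.warnings)
} else {
  console.log(docs.map(doc => doc.toJS())) // [ { a: 1 }, { b: 2 } ]
}
```
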
diff --git a/node_modules/yaml/dist/public-api.js b/node_modules/yaml/dist/public-api.js
new file mode 100644
index 0000000..6f853a1
--- /dev/null
+++ b/node_modules/yaml/dist/public-api.js
@@ -0,0 +1,105 @@
+'use strict';
+
+var composer = require('./compose/composer.js');
+var Document = require('./doc/Document.js');
+var errors = require('./errors.js');
+var log = require('./log.js');
+var lineCounter = require('./parse/line-counter.js');
+var parser = require('./parse/parser.js');
+
+function parseOptions(options) {
+ const prettyErrors = options.prettyErrors !== false;
+ const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;
+ return { lineCounter: lineCounter$1, prettyErrors };
+}
+/**
+ * Parse the input as a stream of YAML documents.
+ *
+ * Documents should be separated from each other by `...` or `---` marker lines.
+ *
+ * @returns If an empty `docs` array is returned, it will be of type
+ * EmptyStream and contain additional stream information. In
+ * TypeScript, you should use `'empty' in docs` as a type guard for it.
+ */
+function parseAllDocuments(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser$1 = new parser.Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer$1 = new composer.Composer(options);
+ const docs = Array.from(composer$1.compose(parser$1.parse(source)));
+ if (prettyErrors && lineCounter)
+ for (const doc of docs) {
+ doc.errors.forEach(errors.prettifyError(source, lineCounter));
+ doc.warnings.forEach(errors.prettifyError(source, lineCounter));
+ }
+ if (docs.length > 0)
+ return docs;
+ return Object.assign([], { empty: true }, composer$1.streamInfo());
+}
+/** Parse an input string into a single YAML.Document */
+function parseDocument(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser$1 = new parser.Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer$1 = new composer.Composer(options);
+ // `doc` is always set by compose.end(true) at the very latest
+ let doc = null;
+ for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {
+ if (!doc)
+ doc = _doc;
+ else if (doc.options.logLevel !== 'silent') {
+ doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
+ break;
+ }
+ }
+ if (prettyErrors && lineCounter) {
+ doc.errors.forEach(errors.prettifyError(source, lineCounter));
+ doc.warnings.forEach(errors.prettifyError(source, lineCounter));
+ }
+ return doc;
+}
+function parse(src, reviver, options) {
+ let _reviver = undefined;
+ if (typeof reviver === 'function') {
+ _reviver = reviver;
+ }
+ else if (options === undefined && reviver && typeof reviver === 'object') {
+ options = reviver;
+ }
+ const doc = parseDocument(src, options);
+ if (!doc)
+ return null;
+ doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));
+ if (doc.errors.length > 0) {
+ if (doc.options.logLevel !== 'silent')
+ throw doc.errors[0];
+ else
+ doc.errors = [];
+ }
+ return doc.toJS(Object.assign({ reviver: _reviver }, options));
+}
+function stringify(value, replacer, options) {
+ var _a;
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ }
+ if (typeof options === 'string')
+ options = options.length;
+ if (typeof options === 'number') {
+ const indent = Math.round(options);
+ options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
+ }
+ if (value === undefined) {
+ const { keepUndefined } = (_a = options !== null && options !== void 0 ? options : replacer) !== null && _a !== void 0 ? _a : {};
+ if (!keepUndefined)
+ return undefined;
+ }
+ return new Document.Document(value, _replacer, options).toString(options);
+}
+
+exports.parse = parse;
+exports.parseAllDocuments = parseAllDocuments;
+exports.parseDocument = parseDocument;
+exports.stringify = stringify;
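A quick sketch of the stream-level behaviour implemented above (inputs are illustrative; the package is assumed to be required as `yaml`):

    const YAML = require('yaml');

    // parseAllDocuments() yields one Document per '---'-separated document
    const docs = YAML.parseAllDocuments('a: 1\n---\nb: 2\n');
    docs.map(doc => doc.toJS()); // [ { a: 1 }, { b: 2 } ]

    // A stream with no documents still carries stream info; 'empty' is the type guard
    const none = YAML.parseAllDocuments('# comments only\n');
    'empty' in none; // true

    // parseDocument() keeps the first document and records a MULTIPLE_DOCS error
    const doc = YAML.parseDocument('a: 1\n---\nb: 2\n');
    doc.errors[0].code; // 'MULTIPLE_DOCS'
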
diff --git a/node_modules/yaml/dist/schema/Schema.d.ts b/node_modules/yaml/dist/schema/Schema.d.ts
new file mode 100644
index 0000000..42c9c41
--- /dev/null
+++ b/node_modules/yaml/dist/schema/Schema.d.ts
@@ -0,0 +1,18 @@
+import { MAP, SCALAR, SEQ } from '../nodes/Node.js';
+import type { Pair } from '../nodes/Pair.js';
+import type { SchemaOptions, ToStringOptions } from '../options.js';
+import type { CollectionTag, ScalarTag } from './types.js';
+export declare class Schema {
+ compat: Array<CollectionTag | ScalarTag> | null;
+ knownTags: Record<string, CollectionTag | ScalarTag>;
+ merge: boolean;
+ name: string;
+ sortMapEntries: ((a: Pair, b: Pair) => number) | null;
+ tags: Array<CollectionTag | ScalarTag>;
+ toStringOptions: Readonly<ToStringOptions> | null;
+ [MAP]: CollectionTag;
+ [SCALAR]: ScalarTag;
+ [SEQ]: CollectionTag;
+ constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }: SchemaOptions);
+ clone(): Schema;
+}
diff --git a/node_modules/yaml/dist/schema/Schema.js b/node_modules/yaml/dist/schema/Schema.js
new file mode 100644
index 0000000..4c409fc
--- /dev/null
+++ b/node_modules/yaml/dist/schema/Schema.js
@@ -0,0 +1,40 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var map = require('./common/map.js');
+var seq = require('./common/seq.js');
+var string = require('./common/string.js');
+var tags = require('./tags.js');
+
+const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
+class Schema {
+ constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
+ this.compat = Array.isArray(compat)
+ ? tags.getTags(compat, 'compat')
+ : compat
+ ? tags.getTags(null, compat)
+ : null;
+ this.merge = !!merge;
+ this.name = (typeof schema === 'string' && schema) || 'core';
+ this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};
+ this.tags = tags.getTags(customTags, this.name);
+ this.toStringOptions = toStringDefaults !== null && toStringDefaults !== void 0 ? toStringDefaults : null;
+ Object.defineProperty(this, Node.MAP, { value: map.map });
+ Object.defineProperty(this, Node.SCALAR, { value: string.string });
+ Object.defineProperty(this, Node.SEQ, { value: seq.seq });
+ // Used by createMap()
+ this.sortMapEntries =
+ typeof sortMapEntries === 'function'
+ ? sortMapEntries
+ : sortMapEntries === true
+ ? sortMapEntriesByKey
+ : null;
+ }
+ clone() {
+ const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
+ copy.tags = this.tags.slice();
+ return copy;
+ }
+}
+
+exports.Schema = Schema;
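The constructor above is driven entirely by `SchemaOptions`, which callers normally pass through the public API rather than instantiating `Schema` directly. A small sketch (values are illustrative):

    const YAML = require('yaml');

    // Schema names resolved here: 'core' (the default), 'failsafe', 'json' and 'yaml-1.1'
    const doc = new YAML.Document({ b: 2, a: 1 }, { schema: 'core', sortMapEntries: true });
    String(doc); // => 'a: 1\nb: 2\n'  (sortMapEntries: true uses the by-key comparator above)
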
diff --git a/node_modules/yaml/dist/schema/common/map.d.ts b/node_modules/yaml/dist/schema/common/map.d.ts
new file mode 100644
index 0000000..9b300f8
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/map.d.ts
@@ -0,0 +1,2 @@
+import type { CollectionTag } from '../types.js';
+export declare const map: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/common/map.js b/node_modules/yaml/dist/schema/common/map.js
new file mode 100644
index 0000000..46dab3c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/map.js
@@ -0,0 +1,44 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+
+function createMap(schema, obj, ctx) {
+ const { keepUndefined, replacer } = ctx;
+ const map = new YAMLMap.YAMLMap(schema);
+ const add = (key, value) => {
+ if (typeof replacer === 'function')
+ value = replacer.call(obj, key, value);
+ else if (Array.isArray(replacer) && !replacer.includes(key))
+ return;
+ if (value !== undefined || keepUndefined)
+ map.items.push(Pair.createPair(key, value, ctx));
+ };
+ if (obj instanceof Map) {
+ for (const [key, value] of obj)
+ add(key, value);
+ }
+ else if (obj && typeof obj === 'object') {
+ for (const key of Object.keys(obj))
+ add(key, obj[key]);
+ }
+ if (typeof schema.sortMapEntries === 'function') {
+ map.items.sort(schema.sortMapEntries);
+ }
+ return map;
+}
+const map = {
+ collection: 'map',
+ createNode: createMap,
+ default: true,
+ nodeClass: YAMLMap.YAMLMap,
+ tag: 'tag:yaml.org,2002:map',
+ resolve(map, onError) {
+ if (!Node.isMap(map))
+ onError('Expected a mapping for this tag');
+ return map;
+ }
+};
+
+exports.map = map;
diff --git a/node_modules/yaml/dist/schema/common/null.d.ts b/node_modules/yaml/dist/schema/common/null.d.ts
new file mode 100644
index 0000000..66abea5
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/null.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const nullTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/common/null.js b/node_modules/yaml/dist/schema/common/null.js
new file mode 100644
index 0000000..cb353a7
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/null.js
@@ -0,0 +1,17 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+const nullTag = {
+ identify: value => value == null,
+ createNode: () => new Scalar.Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^(?:~|[Nn]ull|NULL)?$/,
+ resolve: () => new Scalar.Scalar(null),
+ stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
+ ? source
+ : ctx.options.nullStr
+};
+
+exports.nullTag = nullTag;
diff --git a/node_modules/yaml/dist/schema/common/seq.d.ts b/node_modules/yaml/dist/schema/common/seq.d.ts
new file mode 100644
index 0000000..c038d30
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/seq.d.ts
@@ -0,0 +1,2 @@
+import type { CollectionTag } from '../types.js';
+export declare const seq: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/common/seq.js b/node_modules/yaml/dist/schema/common/seq.js
new file mode 100644
index 0000000..0c48bde
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/seq.js
@@ -0,0 +1,35 @@
+'use strict';
+
+var createNode = require('../../doc/createNode.js');
+var Node = require('../../nodes/Node.js');
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+
+function createSeq(schema, obj, ctx) {
+ const { replacer } = ctx;
+ const seq = new YAMLSeq.YAMLSeq(schema);
+ if (obj && Symbol.iterator in Object(obj)) {
+ let i = 0;
+ for (let it of obj) {
+ if (typeof replacer === 'function') {
+ const key = obj instanceof Set ? it : String(i++);
+ it = replacer.call(obj, key, it);
+ }
+ seq.items.push(createNode.createNode(it, undefined, ctx));
+ }
+ }
+ return seq;
+}
+const seq = {
+ collection: 'seq',
+ createNode: createSeq,
+ default: true,
+ nodeClass: YAMLSeq.YAMLSeq,
+ tag: 'tag:yaml.org,2002:seq',
+ resolve(seq, onError) {
+ if (!Node.isSeq(seq))
+ onError('Expected a sequence for this tag');
+ return seq;
+ }
+};
+
+exports.seq = seq;
diff --git a/node_modules/yaml/dist/schema/common/string.d.ts b/node_modules/yaml/dist/schema/common/string.d.ts
new file mode 100644
index 0000000..539c9b1
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/string.d.ts
@@ -0,0 +1,2 @@
+import type { ScalarTag } from '../types.js';
+export declare const string: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/common/string.js b/node_modules/yaml/dist/schema/common/string.js
new file mode 100644
index 0000000..7601420
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/string.js
@@ -0,0 +1,16 @@
+'use strict';
+
+var stringifyString = require('../../stringify/stringifyString.js');
+
+const string = {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify(item, ctx, onComment, onChompKeep) {
+ ctx = Object.assign({ actualString: true }, ctx);
+ return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);
+ }
+};
+
+exports.string = string;
diff --git a/node_modules/yaml/dist/schema/core/bool.d.ts b/node_modules/yaml/dist/schema/core/bool.d.ts
new file mode 100644
index 0000000..e4bdc4c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/bool.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const boolTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/core/bool.js b/node_modules/yaml/dist/schema/core/bool.js
new file mode 100644
index 0000000..4def73c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/bool.js
@@ -0,0 +1,21 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+const boolTag = {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
+ resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),
+ stringify({ source, value }, ctx) {
+ if (source && boolTag.test.test(source)) {
+ const sv = source[0] === 't' || source[0] === 'T';
+ if (value === sv)
+ return source;
+ }
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+ }
+};
+
+exports.boolTag = boolTag;
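Both tags fall back to `ctx.options` when they cannot simply echo the original source, so the spelling of nulls and booleans is adjustable at stringify time. A sketch (the option values are illustrative):

    const YAML = require('yaml');

    YAML.stringify({ a: null, b: true, c: false });
    // => 'a: null\nb: true\nc: false\n'

    YAML.stringify({ a: null, b: true, c: false }, { nullStr: '~', trueStr: 'True', falseStr: 'False' });
    // => 'a: ~\nb: True\nc: False\n'
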
diff --git a/node_modules/yaml/dist/schema/core/float.d.ts b/node_modules/yaml/dist/schema/core/float.d.ts
new file mode 100644
index 0000000..22f0249
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/float.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const floatNaN: ScalarTag;
+export declare const floatExp: ScalarTag;
+export declare const float: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/core/float.js b/node_modules/yaml/dist/schema/core/float.js
new file mode 100644
index 0000000..a1c96dd
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/float.js
@@ -0,0 +1,47 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
+ resolve: str => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber.stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
+ resolve: str => parseFloat(str),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
+ resolve(str) {
+ const node = new Scalar.Scalar(parseFloat(str));
+ const dot = str.indexOf('.');
+ if (dot !== -1 && str[str.length - 1] === '0')
+ node.minFractionDigits = str.length - dot - 1;
+ return node;
+ },
+ stringify: stringifyNumber.stringifyNumber
+};
+
+exports.float = float;
+exports.floatExp = floatExp;
+exports.floatNaN = floatNaN;
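The `minFractionDigits` bookkeeping above is what lets a trailing zero survive a document round trip, even though `parse()` itself returns a plain number. A sketch:

    const YAML = require('yaml');

    YAML.parse('rate: 2.50').rate;            // => 2.5
    String(YAML.parseDocument('rate: 2.50')); // => 'rate: 2.50\n'
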
diff --git a/node_modules/yaml/dist/schema/core/int.d.ts b/node_modules/yaml/dist/schema/core/int.d.ts
new file mode 100644
index 0000000..35e2d4b
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/int.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const intOct: ScalarTag;
+export declare const int: ScalarTag;
+export declare const intHex: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/core/int.js b/node_modules/yaml/dist/schema/core/int.js
new file mode 100644
index 0000000..fe4c9ca
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/int.js
@@ -0,0 +1,42 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value) && value >= 0)
+ return prefix + value.toString(radix);
+ return stringifyNumber.stringifyNumber(node);
+}
+const intOct = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^0o[0-7]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),
+ stringify: node => intStringify(node, 8, '0o')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber.stringifyNumber
+};
+const intHex = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^0x[0-9a-fA-F]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+exports.int = int;
+exports.intHex = intHex;
+exports.intOct = intOct;
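All three tags share `intResolve()`, so the `intAsBigInt` parse option switches every integer form over to BigInt, and the OCT/HEX formats are remembered for stringification. A sketch:

    const YAML = require('yaml');

    YAML.parse('id: 9007199254740993').id;                        // 9007199254740992 (precision lost)
    YAML.parse('id: 9007199254740993', { intAsBigInt: true }).id; // 9007199254740993n

    // 0o- and 0x-prefixed scalars keep their format on a document round trip
    String(YAML.parseDocument('mode: 0o755\nmask: 0xff\n'));      // 'mode: 0o755\nmask: 0xff\n'
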
diff --git a/node_modules/yaml/dist/schema/core/schema.d.ts b/node_modules/yaml/dist/schema/core/schema.d.ts
new file mode 100644
index 0000000..7663949
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/schema.d.ts
@@ -0,0 +1 @@
+export declare const schema: (import("../types.js").ScalarTag | import("../types.js").CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/core/schema.js b/node_modules/yaml/dist/schema/core/schema.js
new file mode 100644
index 0000000..6ab87f2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/schema.js
@@ -0,0 +1,25 @@
+'use strict';
+
+var map = require('../common/map.js');
+var _null = require('../common/null.js');
+var seq = require('../common/seq.js');
+var string = require('../common/string.js');
+var bool = require('./bool.js');
+var float = require('./float.js');
+var int = require('./int.js');
+
+const schema = [
+ map.map,
+ seq.seq,
+ string.string,
+ _null.nullTag,
+ bool.boolTag,
+ int.intOct,
+ int.int,
+ int.intHex,
+ float.floatNaN,
+ float.floatExp,
+ float.float
+];
+
+exports.schema = schema;
diff --git a/node_modules/yaml/dist/schema/json/schema.d.ts b/node_modules/yaml/dist/schema/json/schema.d.ts
new file mode 100644
index 0000000..a166ca2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/json/schema.d.ts
@@ -0,0 +1,2 @@
+import { CollectionTag, ScalarTag } from '../types.js';
+export declare const schema: (ScalarTag | CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/json/schema.js b/node_modules/yaml/dist/schema/json/schema.js
new file mode 100644
index 0000000..31d0b4d
--- /dev/null
+++ b/node_modules/yaml/dist/schema/json/schema.js
@@ -0,0 +1,64 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var map = require('../common/map.js');
+var seq = require('../common/seq.js');
+
+function intIdentify(value) {
+ return typeof value === 'bigint' || Number.isInteger(value);
+}
+const stringifyJSON = ({ value }) => JSON.stringify(value);
+const jsonScalars = [
+ {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => value == null,
+ createNode: () => new Scalar.Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^null$/,
+ resolve: () => null,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^true|false$/,
+ resolve: str => str === 'true',
+ stringify: stringifyJSON
+ },
+ {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^-?(?:0|[1-9][0-9]*)$/,
+ resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
+ stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value)
+ },
+ {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
+ resolve: str => parseFloat(str),
+ stringify: stringifyJSON
+ }
+];
+const jsonError = {
+ default: true,
+ tag: '',
+ test: /^/,
+ resolve(str, onError) {
+ onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
+ return str;
+ }
+};
+const schema = [map.map, seq.seq].concat(jsonScalars, jsonError);
+
+exports.schema = schema;
diff --git a/node_modules/yaml/dist/schema/tags.d.ts b/node_modules/yaml/dist/schema/tags.d.ts
new file mode 100644
index 0000000..8b4acf2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/tags.d.ts
@@ -0,0 +1,40 @@
+import { SchemaOptions } from '../options.js';
+import type { CollectionTag, ScalarTag } from './types.js';
+declare const tagsByName: {
+ binary: ScalarTag;
+ bool: ScalarTag & {
+ test: RegExp;
+ };
+ float: ScalarTag;
+ floatExp: ScalarTag;
+ floatNaN: ScalarTag;
+ floatTime: ScalarTag;
+ int: ScalarTag;
+ intHex: ScalarTag;
+ intOct: ScalarTag;
+ intTime: ScalarTag;
+ map: CollectionTag;
+ null: ScalarTag & {
+ test: RegExp;
+ };
+ omap: CollectionTag;
+ pairs: CollectionTag;
+ seq: CollectionTag;
+ set: CollectionTag;
+ timestamp: ScalarTag & {
+ test: RegExp;
+ };
+};
+export declare type TagId = keyof typeof tagsByName;
+export declare type Tags = Array<ScalarTag | CollectionTag | TagId>;
+export declare const coreKnownTags: {
+ 'tag:yaml.org,2002:binary': ScalarTag;
+ 'tag:yaml.org,2002:omap': CollectionTag;
+ 'tag:yaml.org,2002:pairs': CollectionTag;
+ 'tag:yaml.org,2002:set': CollectionTag;
+ 'tag:yaml.org,2002:timestamp': ScalarTag & {
+ test: RegExp;
+ };
+};
+export declare function getTags(customTags: SchemaOptions['customTags'] | undefined, schemaName: string): (ScalarTag | CollectionTag)[];
+export {};
diff --git a/node_modules/yaml/dist/schema/tags.js b/node_modules/yaml/dist/schema/tags.js
new file mode 100644
index 0000000..e49d2b0
--- /dev/null
+++ b/node_modules/yaml/dist/schema/tags.js
@@ -0,0 +1,86 @@
+'use strict';
+
+var map = require('./common/map.js');
+var _null = require('./common/null.js');
+var seq = require('./common/seq.js');
+var string = require('./common/string.js');
+var bool = require('./core/bool.js');
+var float = require('./core/float.js');
+var int = require('./core/int.js');
+var schema = require('./core/schema.js');
+var schema$1 = require('./json/schema.js');
+var binary = require('./yaml-1.1/binary.js');
+var omap = require('./yaml-1.1/omap.js');
+var pairs = require('./yaml-1.1/pairs.js');
+var schema$2 = require('./yaml-1.1/schema.js');
+var set = require('./yaml-1.1/set.js');
+var timestamp = require('./yaml-1.1/timestamp.js');
+
+const schemas = new Map([
+ ['core', schema.schema],
+ ['failsafe', [map.map, seq.seq, string.string]],
+ ['json', schema$1.schema],
+ ['yaml11', schema$2.schema],
+ ['yaml-1.1', schema$2.schema]
+]);
+const tagsByName = {
+ binary: binary.binary,
+ bool: bool.boolTag,
+ float: float.float,
+ floatExp: float.floatExp,
+ floatNaN: float.floatNaN,
+ floatTime: timestamp.floatTime,
+ int: int.int,
+ intHex: int.intHex,
+ intOct: int.intOct,
+ intTime: timestamp.intTime,
+ map: map.map,
+ null: _null.nullTag,
+ omap: omap.omap,
+ pairs: pairs.pairs,
+ seq: seq.seq,
+ set: set.set,
+ timestamp: timestamp.timestamp
+};
+const coreKnownTags = {
+ 'tag:yaml.org,2002:binary': binary.binary,
+ 'tag:yaml.org,2002:omap': omap.omap,
+ 'tag:yaml.org,2002:pairs': pairs.pairs,
+ 'tag:yaml.org,2002:set': set.set,
+ 'tag:yaml.org,2002:timestamp': timestamp.timestamp
+};
+function getTags(customTags, schemaName) {
+ let tags = schemas.get(schemaName);
+ if (!tags) {
+ if (Array.isArray(customTags))
+ tags = [];
+ else {
+ const keys = Array.from(schemas.keys())
+ .filter(key => key !== 'yaml11')
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
+ }
+ }
+ if (Array.isArray(customTags)) {
+ for (const tag of customTags)
+ tags = tags.concat(tag);
+ }
+ else if (typeof customTags === 'function') {
+ tags = customTags(tags.slice());
+ }
+ return tags.map(tag => {
+ if (typeof tag !== 'string')
+ return tag;
+ const tagObj = tagsByName[tag];
+ if (tagObj)
+ return tagObj;
+ const keys = Object.keys(tagsByName)
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
+ });
+}
+
+exports.coreKnownTags = coreKnownTags;
+exports.getTags = getTags;
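`getTags()` accepts tag objects, the string identifiers from `tagsByName`, or a function over the resolved tag array, so individual built-in tags can be layered onto a schema by name. A sketch (the Date value is illustrative):

    const YAML = require('yaml');

    // By name: add the YAML 1.1 !!timestamp tag on top of the core schema
    YAML.stringify({ at: new Date(0) }, { customTags: ['timestamp'] });
    // => 'at: 1970-01-01\n'  (without it, the Date would round-trip through its toJSON())

    // As a function: receive the resolved tag array and extend or filter it
    YAML.stringify({ at: new Date(0) }, { customTags: tags => tags.concat('timestamp') });
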
diff --git a/node_modules/yaml/dist/schema/types.d.ts b/node_modules/yaml/dist/schema/types.d.ts
new file mode 100644
index 0000000..13e8354
--- /dev/null
+++ b/node_modules/yaml/dist/schema/types.d.ts
@@ -0,0 +1,82 @@
+import type { CreateNodeContext } from '../doc/createNode.js';
+import type { Schema } from './Schema.js';
+import type { Node } from '../nodes/Node.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { YAMLMap } from '../nodes/YAMLMap.js';
+import type { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { ParseOptions } from '../options.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+interface TagBase {
+ /**
+ * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes.
+ */
+ createNode?: (schema: Schema, value: unknown, ctx: CreateNodeContext) => Node;
+ /**
+ * If `true`, together with `test` allows for values to be stringified without
+ * an explicit tag. For most cases, it's unlikely that you'll actually want to
+ * use this, even if you first think you do.
+ */
+ default: boolean;
+ /**
+ * If a tag has multiple forms that should be parsed and/or stringified
+ * differently, use `format` to identify them.
+ */
+ format?: string;
+ /**
+ * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or
+ * `instanceof`.
+ */
+ identify?: (value: unknown) => boolean;
+ /**
+ * The identifier for your data type, with which its stringified form will be
+ * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified
+ * `tag:domain,date:foo`.
+ */
+ tag: string;
+}
+export interface ScalarTag extends TagBase {
+ collection?: never;
+ nodeClass?: never;
+ /**
+ * Turns a value into an AST node.
+ * If returning a non-`Node` value, the output will be wrapped as a `Scalar`.
+ */
+ resolve(value: string, onError: (message: string) => void, options: ParseOptions): unknown;
+ /**
+ * Optional function stringifying a Scalar node. If your data includes a
+ * suitable `.toString()` method, you can probably leave this undefined and
+ * use the default stringifier.
+ *
+ * @param item The node being stringified.
+ * @param ctx Contains the stringifying context variables.
+ * @param onComment Callback to signal that the stringifier includes the
+ * item's comment in its output.
+ * @param onChompKeep Callback to signal that the output uses a block scalar
+ * type with the `+` chomping indicator.
+ */
+ stringify?: (item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void) => string;
+ /**
+ * Together with `default` allows for values to be stringified without an
+ * explicit tag and detected using a regular expression. For most cases, it's
+ * unlikely that you'll actually want to use these, even if you first think
+ * you do.
+ */
+ test?: RegExp;
+}
+export interface CollectionTag extends TagBase {
+ stringify?: never;
+ test?: never;
+ /** The source collection type supported by this tag. */
+ collection: 'map' | 'seq';
+ /**
+ * The `Node` child class that implements this tag.
+ * If set, used to select this tag when stringifying.
+ */
+ nodeClass?: new () => Node;
+ /**
+ * Turns a value into an AST node.
+ * If returning a non-`Node` value, the output will be wrapped as a `Scalar`.
+ */
+ resolve(value: YAMLMap.Parsed | YAMLSeq.Parsed, onError: (message: string) => void, options: ParseOptions): unknown;
+}
+export {};
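These two interfaces are what user-defined tags implement. A hypothetical local `!vec2` scalar tag, as a sketch (the tag name and its `x,y` text format are invented for illustration):

    const YAML = require('yaml');

    const vec2 = {
      identify: value => !!value && typeof value === 'object' && 'x' in value && 'y' in value,
      tag: '!vec2',
      default: false,
      resolve(str, onError) {
        const m = str.match(/^(-?[0-9.]+),(-?[0-9.]+)$/);
        if (!m) {
          onError('!vec2 expects two comma-separated numbers');
          return str;
        }
        return { x: Number(m[1]), y: Number(m[2]) };
      },
      stringify: ({ value }) => `${value.x},${value.y}`
    };

    YAML.parse('origin: !vec2 0,0', { customTags: [vec2] });
    // => { origin: { x: 0, y: 0 } }
    YAML.stringify({ origin: { x: 1, y: 2 } }, { customTags: [vec2] });
    // => 'origin: !vec2 1,2\n'
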
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts
new file mode 100644
index 0000000..2054970
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts
@@ -0,0 +1,2 @@
+import type { ScalarTag } from '../types.js';
+export declare const binary: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.js b/node_modules/yaml/dist/schema/yaml-1.1/binary.js
new file mode 100644
index 0000000..df93e05
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/binary.js
@@ -0,0 +1,68 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyString = require('../../stringify/stringifyString.js');
+
+const binary = {
+ identify: value => value instanceof Uint8Array,
+ default: false,
+ tag: 'tag:yaml.org,2002:binary',
+ /**
+ * Returns a Buffer under Node.js and a Uint8Array in browsers
+ *
+ * To use the resulting buffer as an image, you'll want to do something like:
+ *
+ * const blob = new Blob([buffer], { type: 'image/jpeg' })
+ * document.querySelector('#photo').src = URL.createObjectURL(blob)
+ */
+ resolve(src, onError) {
+ if (typeof Buffer === 'function') {
+ return Buffer.from(src, 'base64');
+ }
+ else if (typeof atob === 'function') {
+ // On IE 11, atob() can't handle newlines
+ const str = atob(src.replace(/[\n\r]/g, ''));
+ const buffer = new Uint8Array(str.length);
+ for (let i = 0; i < str.length; ++i)
+ buffer[i] = str.charCodeAt(i);
+ return buffer;
+ }
+ else {
+ onError('This environment does not support reading binary tags; either Buffer or atob is required');
+ return src;
+ }
+ },
+ stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const buf = value; // checked earlier by binary.identify()
+ let str;
+ if (typeof Buffer === 'function') {
+ str =
+ buf instanceof Buffer
+ ? buf.toString('base64')
+ : Buffer.from(buf.buffer).toString('base64');
+ }
+ else if (typeof btoa === 'function') {
+ let s = '';
+ for (let i = 0; i < buf.length; ++i)
+ s += String.fromCharCode(buf[i]);
+ str = btoa(s);
+ }
+ else {
+ throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
+ }
+ if (!type)
+ type = Scalar.Scalar.BLOCK_LITERAL;
+ if (type !== Scalar.Scalar.QUOTE_DOUBLE) {
+ const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
+ const n = Math.ceil(str.length / lineWidth);
+ const lines = new Array(n);
+ for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
+ lines[i] = str.substr(o, lineWidth);
+ }
+ str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' ');
+ }
+ return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
+ }
+};
+
+exports.binary = binary;
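In practice, the tag reads base64 into a Buffer (under Node.js) or a Uint8Array (in browsers) and writes it back out as a block literal. A sketch:

    const YAML = require('yaml');

    // !!binary is in coreKnownTags, so explicitly tagged input resolves by default
    const { blob } = YAML.parse('blob: !!binary "aGVsbG8="');
    blob.toString(); // 'hello'

    // Writing binary back out needs the tag in the schema, e.g. via customTags
    YAML.stringify({ blob }, { customTags: ['binary'] });
    // writes the base64 text back as an explicitly tagged block literal
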
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts
new file mode 100644
index 0000000..587b55b
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts
@@ -0,0 +1,7 @@
+import type { ScalarTag } from '../types.js';
+export declare const trueTag: ScalarTag & {
+ test: RegExp;
+};
+export declare const falseTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.js b/node_modules/yaml/dist/schema/yaml-1.1/bool.js
new file mode 100644
index 0000000..fdb3b46
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/bool.js
@@ -0,0 +1,29 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+function boolStringify({ value, source }, ctx) {
+ const boolObj = value ? trueTag : falseTag;
+ if (source && boolObj.test.test(source))
+ return source;
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+}
+const trueTag = {
+ identify: value => value === true,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
+ resolve: () => new Scalar.Scalar(true),
+ stringify: boolStringify
+};
+const falseTag = {
+ identify: value => value === false,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
+ resolve: () => new Scalar.Scalar(false),
+ stringify: boolStringify
+};
+
+exports.falseTag = falseTag;
+exports.trueTag = trueTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts
new file mode 100644
index 0000000..22f0249
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const floatNaN: ScalarTag;
+export declare const floatExp: ScalarTag;
+export declare const float: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.js b/node_modules/yaml/dist/schema/yaml-1.1/float.js
new file mode 100644
index 0000000..4aea19a
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/float.js
@@ -0,0 +1,50 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
+ resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber.stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
+ resolve: (str) => parseFloat(str.replace(/_/g, '')),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
+ resolve(str) {
+ const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));
+ const dot = str.indexOf('.');
+ if (dot !== -1) {
+ const f = str.substring(dot + 1).replace(/_/g, '');
+ if (f[f.length - 1] === '0')
+ node.minFractionDigits = f.length;
+ }
+ return node;
+ },
+ stringify: stringifyNumber.stringifyNumber
+};
+
+exports.float = float;
+exports.floatExp = floatExp;
+exports.floatNaN = floatNaN;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts
new file mode 100644
index 0000000..3d92f37
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts
@@ -0,0 +1,5 @@
+import type { ScalarTag } from '../types.js';
+export declare const intBin: ScalarTag;
+export declare const intOct: ScalarTag;
+export declare const int: ScalarTag;
+export declare const intHex: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.js b/node_modules/yaml/dist/schema/yaml-1.1/int.js
new file mode 100644
index 0000000..fdf47ca
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/int.js
@@ -0,0 +1,76 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+function intResolve(str, offset, radix, { intAsBigInt }) {
+ const sign = str[0];
+ if (sign === '-' || sign === '+')
+ offset += 1;
+ str = str.substring(offset).replace(/_/g, '');
+ if (intAsBigInt) {
+ switch (radix) {
+ case 2:
+ str = `0b${str}`;
+ break;
+ case 8:
+ str = `0o${str}`;
+ break;
+ case 16:
+ str = `0x${str}`;
+ break;
+ }
+ const n = BigInt(str);
+ return sign === '-' ? BigInt(-1) * n : n;
+ }
+ const n = parseInt(str, radix);
+ return sign === '-' ? -1 * n : n;
+}
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value)) {
+ const str = value.toString(radix);
+ return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
+ }
+ return stringifyNumber.stringifyNumber(node);
+}
+const intBin = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'BIN',
+ test: /^[-+]?0b[0-1_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
+ stringify: node => intStringify(node, 2, '0b')
+};
+const intOct = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^[-+]?0[0-7_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
+ stringify: node => intStringify(node, 8, '0')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9][0-9_]*$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber.stringifyNumber
+};
+const intHex = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^[-+]?0x[0-9a-fA-F_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+exports.int = int;
+exports.intBin = intBin;
+exports.intHex = intHex;
+exports.intOct = intOct;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts
new file mode 100644
index 0000000..4d71bc4
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts
@@ -0,0 +1,21 @@
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+import { ToJSContext } from '../../nodes/toJS.js';
+import { CollectionTag } from '../types.js';
+export declare class YAMLOMap extends YAMLSeq {
+ static tag: string;
+ constructor();
+ add: (pair: import("../../index.js").Pair<any, any> | {
+ key: any;
+ value: any;
+ }, overwrite?: boolean | undefined) => void;
+ delete: (key: any) => boolean;
+ get: (key: any, keepScalar?: boolean | undefined) => unknown;
+ has: (key: any) => boolean;
+ set: (key: any, value: any) => void;
+ /**
+ * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
+ * but TypeScript won't allow widening the signature of a child method.
+ */
+ toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
+}
+export declare const omap: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.js b/node_modules/yaml/dist/schema/yaml-1.1/omap.js
new file mode 100644
index 0000000..006a65d
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/omap.js
@@ -0,0 +1,76 @@
+'use strict';
+
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+var toJS = require('../../nodes/toJS.js');
+var Node = require('../../nodes/Node.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+var pairs = require('./pairs.js');
+
+class YAMLOMap extends YAMLSeq.YAMLSeq {
+ constructor() {
+ super();
+ this.add = YAMLMap.YAMLMap.prototype.add.bind(this);
+ this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);
+ this.get = YAMLMap.YAMLMap.prototype.get.bind(this);
+ this.has = YAMLMap.YAMLMap.prototype.has.bind(this);
+ this.set = YAMLMap.YAMLMap.prototype.set.bind(this);
+ this.tag = YAMLOMap.tag;
+ }
+ /**
+ * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
+ * but TypeScript won't allow widening the signature of a child method.
+ */
+ toJSON(_, ctx) {
+ if (!ctx)
+ return super.toJSON(_);
+ const map = new Map();
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const pair of this.items) {
+ let key, value;
+ if (Node.isPair(pair)) {
+ key = toJS.toJS(pair.key, '', ctx);
+ value = toJS.toJS(pair.value, key, ctx);
+ }
+ else {
+ key = toJS.toJS(pair, '', ctx);
+ }
+ if (map.has(key))
+ throw new Error('Ordered maps must not include duplicate keys');
+ map.set(key, value);
+ }
+ return map;
+ }
+}
+YAMLOMap.tag = 'tag:yaml.org,2002:omap';
+const omap = {
+ collection: 'seq',
+ identify: value => value instanceof Map,
+ nodeClass: YAMLOMap,
+ default: false,
+ tag: 'tag:yaml.org,2002:omap',
+ resolve(seq, onError) {
+ const pairs$1 = pairs.resolvePairs(seq, onError);
+ const seenKeys = [];
+ for (const { key } of pairs$1.items) {
+ if (Node.isScalar(key)) {
+ if (seenKeys.includes(key.value)) {
+ onError(`Ordered maps must not include duplicate keys: ${key.value}`);
+ }
+ else {
+ seenKeys.push(key.value);
+ }
+ }
+ }
+ return Object.assign(new YAMLOMap(), pairs$1);
+ },
+ createNode(schema, iterable, ctx) {
+ const pairs$1 = pairs.createPairs(schema, iterable, ctx);
+ const omap = new YAMLOMap();
+ omap.items = pairs$1.items;
+ return omap;
+ }
+};
+
+exports.YAMLOMap = YAMLOMap;
+exports.omap = omap;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts
new file mode 100644
index 0000000..8090ac2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts
@@ -0,0 +1,10 @@
+import type { CreateNodeContext } from '../../doc/createNode.js';
+import { ParsedNode } from '../../nodes/Node.js';
+import { Pair } from '../../nodes/Pair.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+import type { Schema } from '../../schema/Schema.js';
+import type { CollectionTag } from '../types.js';
+export declare function resolvePairs(seq: YAMLSeq.Parsed<ParsedNode | Pair<ParsedNode, ParsedNode | null>> | YAMLMap.Parsed, onError: (message: string) => void): YAMLSeq.Parsed<Pair<ParsedNode, ParsedNode | null>>;
+export declare function createPairs(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSeq<unknown>;
+export declare const pairs: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.js b/node_modules/yaml/dist/schema/yaml-1.1/pairs.js
new file mode 100644
index 0000000..12858ab
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/pairs.js
@@ -0,0 +1,82 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var Scalar = require('../../nodes/Scalar.js');
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+
+function resolvePairs(seq, onError) {
+ var _a;
+ if (Node.isSeq(seq)) {
+ for (let i = 0; i < seq.items.length; ++i) {
+ let item = seq.items[i];
+ if (Node.isPair(item))
+ continue;
+ else if (Node.isMap(item)) {
+ if (item.items.length > 1)
+ onError('Each pair must have its own sequence indicator');
+ const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));
+ if (item.commentBefore)
+ pair.key.commentBefore = pair.key.commentBefore
+ ? `${item.commentBefore}\n${pair.key.commentBefore}`
+ : item.commentBefore;
+ if (item.comment) {
+ const cn = (_a = pair.value) !== null && _a !== void 0 ? _a : pair.key;
+ cn.comment = cn.comment
+ ? `${item.comment}\n${cn.comment}`
+ : item.comment;
+ }
+ item = pair;
+ }
+ seq.items[i] = Node.isPair(item) ? item : new Pair.Pair(item);
+ }
+ }
+ else
+ onError('Expected a sequence for this tag');
+ return seq;
+}
+function createPairs(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const pairs = new YAMLSeq.YAMLSeq(schema);
+ pairs.tag = 'tag:yaml.org,2002:pairs';
+ let i = 0;
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let it of iterable) {
+ if (typeof replacer === 'function')
+ it = replacer.call(iterable, String(i++), it);
+ let key, value;
+ if (Array.isArray(it)) {
+ if (it.length === 2) {
+ key = it[0];
+ value = it[1];
+ }
+ else
+ throw new TypeError(`Expected [key, value] tuple: ${it}`);
+ }
+ else if (it && it instanceof Object) {
+ const keys = Object.keys(it);
+ if (keys.length === 1) {
+ key = keys[0];
+ value = it[key];
+ }
+ else
+ throw new TypeError(`Expected { key: value } tuple: ${it}`);
+ }
+ else {
+ key = it;
+ }
+ pairs.items.push(Pair.createPair(key, value, ctx));
+ }
+ return pairs;
+}
+const pairs = {
+ collection: 'seq',
+ default: false,
+ tag: 'tag:yaml.org,2002:pairs',
+ resolve: resolvePairs,
+ createNode: createPairs
+};
+
+exports.createPairs = createPairs;
+exports.pairs = pairs;
+exports.resolvePairs = resolvePairs;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts
new file mode 100644
index 0000000..7663949
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts
@@ -0,0 +1 @@
+export declare const schema: (import("../types.js").ScalarTag | import("../types.js").CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.js b/node_modules/yaml/dist/schema/yaml-1.1/schema.js
new file mode 100644
index 0000000..c04270a
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/schema.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var map = require('../common/map.js');
+var _null = require('../common/null.js');
+var seq = require('../common/seq.js');
+var string = require('../common/string.js');
+var binary = require('./binary.js');
+var bool = require('./bool.js');
+var float = require('./float.js');
+var int = require('./int.js');
+var omap = require('./omap.js');
+var pairs = require('./pairs.js');
+var set = require('./set.js');
+var timestamp = require('./timestamp.js');
+
+const schema = [
+ map.map,
+ seq.seq,
+ string.string,
+ _null.nullTag,
+ bool.trueTag,
+ bool.falseTag,
+ int.intBin,
+ int.intOct,
+ int.int,
+ int.intHex,
+ float.floatNaN,
+ float.floatExp,
+ float.float,
+ binary.binary,
+ omap.omap,
+ pairs.pairs,
+ set.set,
+ timestamp.intTime,
+ timestamp.floatTime,
+ timestamp.timestamp
+];
+
+exports.schema = schema;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts
new file mode 100644
index 0000000..fa7cdac
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts
@@ -0,0 +1,22 @@
+import type { Schema } from '../../schema/Schema.js';
+import { Pair } from '../../nodes/Pair.js';
+import { Scalar } from '../../nodes/Scalar.js';
+import { ToJSContext } from '../../nodes/toJS.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+import type { StringifyContext } from '../../stringify/stringify.js';
+import type { CollectionTag } from '../types.js';
+export declare class YAMLSet<T = unknown> extends YAMLMap<T, Scalar<null> | null> {
+ static tag: string;
+ constructor(schema?: Schema);
+ add(key: T | Pair<T, Scalar<null> | null> | {
+ key: T;
+ value: Scalar<null> | null;
+ }): void;
+ get(key?: T, keepPair?: boolean): unknown;
+ set(key: T, value: boolean): void;
+ /** Will throw; `value` must be boolean */
+ set(key: T, value: null): void;
+ toJSON(_?: unknown, ctx?: ToJSContext): any;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
+export declare const set: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.js b/node_modules/yaml/dist/schema/yaml-1.1/set.js
new file mode 100644
index 0000000..2270695
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/set.js
@@ -0,0 +1,90 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+
+class YAMLSet extends YAMLMap.YAMLMap {
+ constructor(schema) {
+ super(schema);
+ this.tag = YAMLSet.tag;
+ }
+ add(key) {
+ let pair;
+ if (Node.isPair(key))
+ pair = key;
+ else if (typeof key === 'object' &&
+ 'key' in key &&
+ 'value' in key &&
+ key.value === null)
+ pair = new Pair.Pair(key.key, null);
+ else
+ pair = new Pair.Pair(key, null);
+ const prev = YAMLMap.findPair(this.items, pair.key);
+ if (!prev)
+ this.items.push(pair);
+ }
+ get(key, keepPair) {
+ const pair = YAMLMap.findPair(this.items, key);
+ return !keepPair && Node.isPair(pair)
+ ? Node.isScalar(pair.key)
+ ? pair.key.value
+ : pair.key
+ : pair;
+ }
+ set(key, value) {
+ if (typeof value !== 'boolean')
+ throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
+ const prev = YAMLMap.findPair(this.items, key);
+ if (prev && !value) {
+ this.items.splice(this.items.indexOf(prev), 1);
+ }
+ else if (!prev && value) {
+ this.items.push(new Pair.Pair(key));
+ }
+ }
+ toJSON(_, ctx) {
+ return super.toJSON(_, ctx, Set);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ if (this.hasAllNullValues(true))
+ return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
+ else
+ throw new Error('Set items must all have null values');
+ }
+}
+YAMLSet.tag = 'tag:yaml.org,2002:set';
+const set = {
+ collection: 'map',
+ identify: value => value instanceof Set,
+ nodeClass: YAMLSet,
+ default: false,
+ tag: 'tag:yaml.org,2002:set',
+ resolve(map, onError) {
+ if (Node.isMap(map)) {
+ if (map.hasAllNullValues(true))
+ return Object.assign(new YAMLSet(), map);
+ else
+ onError('Set items must all have null values');
+ }
+ else
+ onError('Expected a mapping for this tag');
+ return map;
+ },
+ createNode(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const set = new YAMLSet(schema);
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let value of iterable) {
+ if (typeof replacer === 'function')
+ value = replacer.call(iterable, value, value);
+ set.items.push(Pair.createPair(value, null, ctx));
+ }
+ return set;
+ }
+};
+
+exports.YAMLSet = YAMLSet;
+exports.set = set;
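With the yaml-1.1 schema selected, these collection tags and the wider 1.1 scalar forms are all active. A sketch:

    const YAML = require('yaml');

    const res = YAML.parse('flags: !!set { read, write }\nlegacy: yes\n', { schema: 'yaml-1.1' });
    // res.flags is a Set { 'read', 'write' }; res.legacy === true ('yes' is a 1.1 boolean)

    // A JS Set stringifies back through YAMLSet, using explicit-key ('? ') entries
    YAML.stringify({ flags: new Set(['read', 'write']) }, { schema: 'yaml-1.1' });
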
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts
new file mode 100644
index 0000000..0c1d2d4
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts
@@ -0,0 +1,6 @@
+import type { ScalarTag } from '../types.js';
+export declare const intTime: ScalarTag;
+export declare const floatTime: ScalarTag;
+export declare const timestamp: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js
new file mode 100644
index 0000000..2d78ae8
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js
@@ -0,0 +1,105 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+/** Internal types handle bigint as number, because TS can't figure it out. */
+function parseSexagesimal(str, asBigInt) {
+ const sign = str[0];
+ const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
+ const num = (n) => asBigInt ? BigInt(n) : Number(n);
+ const res = parts
+ .replace(/_/g, '')
+ .split(':')
+ .reduce((res, p) => res * num(60) + num(p), num(0));
+ return (sign === '-' ? num(-1) * res : res);
+}
+/**
+ * hhhh:mm:ss.sss
+ *
+ * Internal types handle bigint as number, because TS can't figure it out.
+ */
+function stringifySexagesimal(node) {
+ let { value } = node;
+ let num = (n) => n;
+ if (typeof value === 'bigint')
+ num = n => BigInt(n);
+ else if (isNaN(value) || !isFinite(value))
+ return stringifyNumber.stringifyNumber(node);
+ let sign = '';
+ if (value < 0) {
+ sign = '-';
+ value *= num(-1);
+ }
+ const _60 = num(60);
+ const parts = [value % _60]; // seconds, including ms
+ if (value < 60) {
+ parts.unshift(0); // at least one : is required
+ }
+ else {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value % _60); // minutes
+ if (value >= 60) {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value); // hours
+ }
+ }
+ return (sign +
+ parts
+ .map(n => (n < 10 ? '0' + String(n) : String(n)))
+ .join(':')
+ .replace(/000000\d*$/, '') // % 60 may introduce error
+ );
+}
+const intTime = {
+ identify: value => typeof value === 'bigint' || Number.isInteger(value),
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
+ resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
+ stringify: stringifySexagesimal
+};
+const floatTime = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
+ resolve: str => parseSexagesimal(str, false),
+ stringify: stringifySexagesimal
+};
+const timestamp = {
+ identify: value => value instanceof Date,
+ default: true,
+ tag: 'tag:yaml.org,2002:timestamp',
+ // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
+ // may be omitted altogether, resulting in a date format. In such a case, the time part is
+ // assumed to be 00:00:00Z (start of day, UTC).
+ test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
+ '(?:' + // time is optional
+ '(?:t|T|[ \\t]+)' + // t | T | whitespace
+ '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
+ '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
+ ')?$'),
+ resolve(str) {
+ const match = str.match(timestamp.test);
+ if (!match)
+ throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
+ const [, year, month, day, hour, minute, second] = match.map(Number);
+ const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
+ let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
+ const tz = match[8];
+ if (tz && tz !== 'Z') {
+ let d = parseSexagesimal(tz, false);
+ if (Math.abs(d) < 30)
+ d *= 60;
+ date -= 60000 * d;
+ }
+ return new Date(date);
+ },
+ stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
+};
+
+exports.floatTime = floatTime;
+exports.intTime = intTime;
+exports.timestamp = timestamp;
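These tags only fire for plain scalars when they are part of the active schema; with the core schema, date-like strings stay strings unless explicitly tagged. A sketch using the YAML 1.1 forms:

    const YAML = require('yaml');

    const res = YAML.parse('built: 2001-12-14 21:59:43.10 -5\nuptime: 3:25:45\n', { schema: 'yaml-1.1' });
    // res.built is a Date for 2001-12-15T02:59:43.100Z; res.uptime === 12345 (sexagesimal 3:25:45)

    YAML.parse('built: 2001-12-14').built; // => '2001-12-14' (a plain string in the core schema)
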
diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.d.ts b/node_modules/yaml/dist/stringify/foldFlowLines.d.ts
new file mode 100644
index 0000000..58f8c7b
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/foldFlowLines.d.ts
@@ -0,0 +1,34 @@
+export declare const FOLD_FLOW = "flow";
+export declare const FOLD_BLOCK = "block";
+export declare const FOLD_QUOTED = "quoted";
+/**
+ * `'block'` prevents more-indented lines from being folded;
+ * `'quoted'` allows for `\` escapes, including escaped newlines
+ */
+export declare type FoldMode = 'flow' | 'block' | 'quoted';
+export interface FoldOptions {
+ /**
+ * Accounts for leading contents on the first line, defaulting to
+ * `indent.length`
+ */
+ indentAtStart?: number;
+ /** Default: `80` */
+ lineWidth?: number;
+ /**
+ * Allow highly indented lines to stretch the line width or indent content
+ * from the start.
+ *
+ * Default: `20`
+ */
+ minContentWidth?: number;
+ /** Called once if the text is folded */
+ onFold?: () => void;
+ /** Called once if any line of text exceeds lineWidth characters */
+ onOverflow?: () => void;
+}
+/**
+ * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
+ * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
+ * terminated with `\n` and started with `indent`.
+ */
+export declare function foldFlowLines(text: string, indent: string, mode?: FoldMode, { indentAtStart, lineWidth, minContentWidth, onFold, onOverflow }?: FoldOptions): string;
diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.js b/node_modules/yaml/dist/stringify/foldFlowLines.js
new file mode 100644
index 0000000..efe7a25
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/foldFlowLines.js
@@ -0,0 +1,140 @@
+'use strict';
+
+const FOLD_FLOW = 'flow';
+const FOLD_BLOCK = 'block';
+const FOLD_QUOTED = 'quoted';
+/**
+ * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
+ * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
+ * terminated with `\n` and started with `indent`.
+ */
+function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
+ if (!lineWidth || lineWidth < 0)
+ return text;
+ const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
+ if (text.length <= endStep)
+ return text;
+ const folds = [];
+ const escapedFolds = {};
+ let end = lineWidth - indent.length;
+ if (typeof indentAtStart === 'number') {
+ if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
+ folds.push(0);
+ else
+ end = lineWidth - indentAtStart;
+ }
+ let split = undefined;
+ let prev = undefined;
+ let overflow = false;
+ let i = -1;
+ let escStart = -1;
+ let escEnd = -1;
+ if (mode === FOLD_BLOCK) {
+ i = consumeMoreIndentedLines(text, i);
+ if (i !== -1)
+ end = i + endStep;
+ }
+ for (let ch; (ch = text[(i += 1)]);) {
+ if (mode === FOLD_QUOTED && ch === '\\') {
+ escStart = i;
+ switch (text[i + 1]) {
+ case 'x':
+ i += 3;
+ break;
+ case 'u':
+ i += 5;
+ break;
+ case 'U':
+ i += 9;
+ break;
+ default:
+ i += 1;
+ }
+ escEnd = i;
+ }
+ if (ch === '\n') {
+ if (mode === FOLD_BLOCK)
+ i = consumeMoreIndentedLines(text, i);
+ end = i + endStep;
+ split = undefined;
+ }
+ else {
+ if (ch === ' ' &&
+ prev &&
+ prev !== ' ' &&
+ prev !== '\n' &&
+ prev !== '\t') {
+ // space surrounded by non-space can be replaced with newline + indent
+ const next = text[i + 1];
+ if (next && next !== ' ' && next !== '\n' && next !== '\t')
+ split = i;
+ }
+ if (i >= end) {
+ if (split) {
+ folds.push(split);
+ end = split + endStep;
+ split = undefined;
+ }
+ else if (mode === FOLD_QUOTED) {
+ // white-space collected at end may stretch past lineWidth
+ while (prev === ' ' || prev === '\t') {
+ prev = ch;
+ ch = text[(i += 1)];
+ overflow = true;
+ }
+ // Account for newline escape, but don't break preceding escape
+ const j = i > escEnd + 1 ? i - 2 : escStart - 1;
+ // Bail out if lineWidth & minContentWidth are shorter than an escape string
+ if (escapedFolds[j])
+ return text;
+ folds.push(j);
+ escapedFolds[j] = true;
+ end = j + endStep;
+ split = undefined;
+ }
+ else {
+ overflow = true;
+ }
+ }
+ }
+ prev = ch;
+ }
+ if (overflow && onOverflow)
+ onOverflow();
+ if (folds.length === 0)
+ return text;
+ if (onFold)
+ onFold();
+ let res = text.slice(0, folds[0]);
+ for (let i = 0; i < folds.length; ++i) {
+ const fold = folds[i];
+ const end = folds[i + 1] || text.length;
+ if (fold === 0)
+ res = `\n${indent}${text.slice(0, end)}`;
+ else {
+ if (mode === FOLD_QUOTED && escapedFolds[fold])
+ res += `${text[fold]}\\`;
+ res += `\n${indent}${text.slice(fold + 1, end)}`;
+ }
+ }
+ return res;
+}
+/**
+ * Presumes `i + 1` is at the start of a line
+ * @returns index of last newline in more-indented block
+ */
+function consumeMoreIndentedLines(text, i) {
+ let ch = text[i + 1];
+ while (ch === ' ' || ch === '\t') {
+ do {
+ ch = text[(i += 1)];
+ } while (ch && ch !== '\n');
+ ch = text[i + 1];
+ }
+ return i;
+}
+
+exports.FOLD_BLOCK = FOLD_BLOCK;
+exports.FOLD_FLOW = FOLD_FLOW;
+exports.FOLD_QUOTED = FOLD_QUOTED;
+exports.foldFlowLines = foldFlowLines;
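
foldFlowLines is re-exported through the package's 'yaml/util' entry point (see util.js and package.json later in this diff), so it can be tried directly. A minimal sketch, with made-up text and widths; the folded output is shown in the comments.

    const { foldFlowLines } = require('yaml/util');

    // 'flow' mode splits on single spaces between non-space characters and
    // continues each new line with '\n' + indent.
    const text = 'the quick brown fox jumps over the lazy dog';
    console.log(foldFlowLines(text, '  ', 'flow', { lineWidth: 24 }));
    // the quick brown fox
    //   jumps over the lazy
    //   dog
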
diff --git a/node_modules/yaml/dist/stringify/stringify.d.ts b/node_modules/yaml/dist/stringify/stringify.d.ts
new file mode 100644
index 0000000..fe96889
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringify.d.ts
@@ -0,0 +1,20 @@
+import type { Document } from '../doc/Document.js';
+import type { Alias } from '../nodes/Alias.js';
+import type { ToStringOptions } from '../options.js';
+export declare type StringifyContext = {
+ actualString?: boolean;
+ allNullValues?: boolean;
+ anchors: Set<string>;
+ doc: Document;
+ forceBlockIndent?: boolean;
+ implicitKey?: boolean;
+ indent: string;
+ indentStep: string;
+ indentAtStart?: number;
+ inFlow: boolean | null;
+ inStringifyKey?: boolean;
+ options: Readonly<Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>>>;
+ resolvedAliases?: Set<Alias>;
+};
+export declare function createStringifyContext(doc: Document, options: ToStringOptions): StringifyContext;
+export declare function stringify(item: unknown, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringify.js b/node_modules/yaml/dist/stringify/stringify.js
new file mode 100644
index 0000000..b6fd122
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringify.js
@@ -0,0 +1,127 @@
+'use strict';
+
+var anchors = require('../doc/anchors.js');
+var Node = require('../nodes/Node.js');
+var stringifyComment = require('./stringifyComment.js');
+var stringifyString = require('./stringifyString.js');
+
+function createStringifyContext(doc, options) {
+ const opt = Object.assign({
+ blockQuote: true,
+ commentString: stringifyComment.stringifyComment,
+ defaultKeyType: null,
+ defaultStringType: 'PLAIN',
+ directives: null,
+ doubleQuotedAsJSON: false,
+ doubleQuotedMinMultiLineLength: 40,
+ falseStr: 'false',
+ indentSeq: true,
+ lineWidth: 80,
+ minContentWidth: 20,
+ nullStr: 'null',
+ simpleKeys: false,
+ singleQuote: null,
+ trueStr: 'true',
+ verifyAliasOrder: true
+ }, doc.schema.toStringOptions, options);
+ let inFlow;
+ switch (opt.collectionStyle) {
+ case 'block':
+ inFlow = false;
+ break;
+ case 'flow':
+ inFlow = true;
+ break;
+ default:
+ inFlow = null;
+ }
+ return {
+ anchors: new Set(),
+ doc,
+ indent: '',
+ indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
+ inFlow,
+ options: opt
+ };
+}
+function getTagObject(tags, item) {
+ var _a, _b, _c, _d;
+ if (item.tag) {
+ const match = tags.filter(t => t.tag === item.tag);
+ if (match.length > 0)
+ return (_a = match.find(t => t.format === item.format)) !== null && _a !== void 0 ? _a : match[0];
+ }
+ let tagObj = undefined;
+ let obj;
+ if (Node.isScalar(item)) {
+ obj = item.value;
+ const match = tags.filter(t => { var _a; return (_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, obj); });
+ tagObj =
+ (_b = match.find(t => t.format === item.format)) !== null && _b !== void 0 ? _b : match.find(t => !t.format);
+ }
+ else {
+ obj = item;
+ tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
+ }
+ if (!tagObj) {
+ const name = (_d = (_c = obj === null || obj === void 0 ? void 0 : obj.constructor) === null || _c === void 0 ? void 0 : _c.name) !== null && _d !== void 0 ? _d : typeof obj;
+ throw new Error(`Tag not resolved for ${name} value`);
+ }
+ return tagObj;
+}
+// needs to be called before value stringifier to allow for circular anchor refs
+function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {
+ if (!doc.directives)
+ return '';
+ const props = [];
+ const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;
+ if (anchor && anchors.anchorIsValid(anchor)) {
+ anchors$1.add(anchor);
+ props.push(`&${anchor}`);
+ }
+ const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
+ if (tag)
+ props.push(doc.directives.tagString(tag));
+ return props.join(' ');
+}
+function stringify(item, ctx, onComment, onChompKeep) {
+ var _a, _b;
+ if (Node.isPair(item))
+ return item.toString(ctx, onComment, onChompKeep);
+ if (Node.isAlias(item)) {
+ if (ctx.doc.directives)
+ return item.toString(ctx);
+ if ((_a = ctx.resolvedAliases) === null || _a === void 0 ? void 0 : _a.has(item)) {
+ throw new TypeError(`Cannot stringify circular structure without alias nodes`);
+ }
+ else {
+ if (ctx.resolvedAliases)
+ ctx.resolvedAliases.add(item);
+ else
+ ctx.resolvedAliases = new Set([item]);
+ item = item.resolve(ctx.doc);
+ }
+ }
+ let tagObj = undefined;
+ const node = Node.isNode(item)
+ ? item
+ : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
+ if (!tagObj)
+ tagObj = getTagObject(ctx.doc.schema.tags, node);
+ const props = stringifyProps(node, tagObj, ctx);
+ if (props.length > 0)
+ ctx.indentAtStart = ((_b = ctx.indentAtStart) !== null && _b !== void 0 ? _b : 0) + props.length + 1;
+ const str = typeof tagObj.stringify === 'function'
+ ? tagObj.stringify(node, ctx, onComment, onChompKeep)
+ : Node.isScalar(node)
+ ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)
+ : node.toString(ctx, onComment, onChompKeep);
+ if (!props)
+ return str;
+ return Node.isScalar(node) || str[0] === '{' || str[0] === '['
+ ? `${props} ${str}`
+ : `${props}\n${ctx.indent}${str}`;
+}
+
+exports.createStringifyContext = createStringifyContext;
+exports.stringify = stringify;
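
The option defaults merged in createStringifyContext() above are the same ToStringOptions accepted by the public API, so the simplest way to exercise this path is YAML.stringify. A minimal sketch; the two-argument call form and the exact output are assumptions based on the package docs.

    const YAML = require('yaml');

    // collectionStyle feeds the inFlow switch in createStringifyContext().
    console.log(YAML.stringify({ list: [1, 2, 3] }, { collectionStyle: 'flow' }));
    // list: [ 1, 2, 3 ]

    console.log(YAML.stringify({ list: [1, 2, 3] }));
    // list:
    //   - 1
    //   - 2
    //   - 3
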
diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.d.ts b/node_modules/yaml/dist/stringify/stringifyCollection.d.ts
new file mode 100644
index 0000000..207d703
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyCollection.d.ts
@@ -0,0 +1,17 @@
+import { Collection } from '../nodes/Collection.js';
+import { StringifyContext } from './stringify.js';
+interface StringifyCollectionOptions {
+ blockItemPrefix: string;
+ flowChars: {
+ start: '{';
+ end: '}';
+ } | {
+ start: '[';
+ end: ']';
+ };
+ itemIndent: string;
+ onChompKeep?: () => void;
+ onComment?: () => void;
+}
+export declare function stringifyCollection(collection: Readonly<Collection>, ctx: StringifyContext, options: StringifyCollectionOptions): string;
+export {};
diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.js b/node_modules/yaml/dist/stringify/stringifyCollection.js
new file mode 100644
index 0000000..7fc848a
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyCollection.js
@@ -0,0 +1,154 @@
+'use strict';
+
+var Collection = require('../nodes/Collection.js');
+var Node = require('../nodes/Node.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyCollection(collection, ctx, options) {
+ var _a;
+ const flow = (_a = ctx.inFlow) !== null && _a !== void 0 ? _a : collection.flow;
+ const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
+ return stringify(collection, ctx, options);
+}
+function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
+ const { indent, options: { commentString } } = ctx;
+ const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
+ let chompKeep = false; // flag for the preceding node's status
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (Node.isNode(item)) {
+ if (!chompKeep && item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (Node.isPair(item)) {
+ const ik = Node.isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (!chompKeep && ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
+ }
+ }
+ chompKeep = false;
+ let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));
+ if (comment)
+ str += stringifyComment.lineComment(str, itemIndent, commentString(comment));
+ if (chompKeep && comment)
+ chompKeep = false;
+ lines.push(blockItemPrefix + str);
+ }
+ let str;
+ if (lines.length === 0) {
+ str = flowChars.start + flowChars.end;
+ }
+ else {
+ str = lines[0];
+ for (let i = 1; i < lines.length; ++i) {
+ const line = lines[i];
+ str += line ? `\n${indent}${line}` : '\n';
+ }
+ }
+ if (comment) {
+ str += '\n' + stringifyComment.indentComment(commentString(comment), indent);
+ if (onComment)
+ onComment();
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+}
+function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
+ const { indent, indentStep, options: { commentString } } = ctx;
+ itemIndent += indentStep;
+ const itemCtx = Object.assign({}, ctx, {
+ indent: itemIndent,
+ inFlow: true,
+ type: null
+ });
+ let reqNewline = false;
+ let linesAtValue = 0;
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (Node.isNode(item)) {
+ if (item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, false);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (Node.isPair(item)) {
+ const ik = Node.isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, false);
+ if (ik.comment)
+ reqNewline = true;
+ }
+ const iv = Node.isNode(item.value) ? item.value : null;
+ if (iv) {
+ if (iv.comment)
+ comment = iv.comment;
+ if (iv.commentBefore)
+ reqNewline = true;
+ }
+ else if (item.value == null && ik && ik.comment) {
+ comment = ik.comment;
+ }
+ }
+ if (comment)
+ reqNewline = true;
+ let str = stringify.stringify(item, itemCtx, () => (comment = null));
+ if (i < items.length - 1)
+ str += ',';
+ if (comment)
+ str += stringifyComment.lineComment(str, itemIndent, commentString(comment));
+ if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
+ reqNewline = true;
+ lines.push(str);
+ linesAtValue = lines.length;
+ }
+ let str;
+ const { start, end } = flowChars;
+ if (lines.length === 0) {
+ str = start + end;
+ }
+ else {
+ if (!reqNewline) {
+ const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
+ reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;
+ }
+ if (reqNewline) {
+ str = start;
+ for (const line of lines)
+ str += line ? `\n${indentStep}${indent}${line}` : '\n';
+ str += `\n${indent}${end}`;
+ }
+ else {
+ str = `${start} ${lines.join(' ')} ${end}`;
+ }
+ }
+ if (comment) {
+ str += stringifyComment.lineComment(str, commentString(comment), indent);
+ if (onComment)
+ onComment();
+ }
+ return str;
+}
+function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
+ if (comment && chompKeep)
+ comment = comment.replace(/^\n+/, '');
+ if (comment) {
+ const ic = stringifyComment.indentComment(commentString(comment), indent);
+ lines.push(ic.trimStart()); // Avoid double indent on first line
+ }
+}
+
+exports.stringifyCollection = stringifyCollection;
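
Whether the flow or block stringifier runs is decided by ctx.inFlow (the collectionStyle option) or, when that is null, by the collection node's own flow flag. A sketch that toggles the flag on a parsed node; it assumes Document.get returns the collection node itself, and the output comments are indicative.

    const { parseDocument } = require('yaml');

    const doc = parseDocument('nums: [1, 2, 3]\n');
    const seq = doc.get('nums');   // the YAMLSeq node parsed from the flow sequence
    seq.flow = false;              // force stringifyBlockCollection() on re-serialization
    console.log(doc.toString());
    // nums:
    //   - 1
    //   - 2
    //   - 3
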
diff --git a/node_modules/yaml/dist/stringify/stringifyComment.d.ts b/node_modules/yaml/dist/stringify/stringifyComment.d.ts
new file mode 100644
index 0000000..9fcf48d
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyComment.d.ts
@@ -0,0 +1,10 @@
+/**
+ * Stringifies a comment.
+ *
+ * Empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+export declare const stringifyComment: (str: string) => string;
+export declare function indentComment(comment: string, indent: string): string;
+export declare const lineComment: (str: string, indent: string, comment: string) => string;
diff --git a/node_modules/yaml/dist/stringify/stringifyComment.js b/node_modules/yaml/dist/stringify/stringifyComment.js
new file mode 100644
index 0000000..26bf361
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyComment.js
@@ -0,0 +1,24 @@
+'use strict';
+
+/**
+ * Stringifies a comment.
+ *
+ * Empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
+function indentComment(comment, indent) {
+ if (/^\n+$/.test(comment))
+ return comment.substring(1);
+ return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
+}
+const lineComment = (str, indent, comment) => str.endsWith('\n')
+ ? indentComment(comment, indent)
+ : comment.includes('\n')
+ ? '\n' + indentComment(comment, indent)
+ : (str.endsWith(' ') ? '' : ' ') + comment;
+
+exports.indentComment = indentComment;
+exports.lineComment = lineComment;
+exports.stringifyComment = stringifyComment;
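
These helpers are what keep comments attached when a parsed document is written back out; a quick round trip through the public API shows the effect (output shown in the comments).

    const { parseDocument } = require('yaml');

    const doc = parseDocument('# file header\nkey: value # trailing note\n');
    console.log(doc.toString());
    // # file header
    // key: value # trailing note
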
diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.d.ts b/node_modules/yaml/dist/stringify/stringifyDocument.d.ts
new file mode 100644
index 0000000..fb0633c
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyDocument.d.ts
@@ -0,0 +1,3 @@
+import { Document } from '../doc/Document.js';
+import { ToStringOptions } from '../options.js';
+export declare function stringifyDocument(doc: Readonly<Document>, options: ToStringOptions): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.js b/node_modules/yaml/dist/stringify/stringifyDocument.js
new file mode 100644
index 0000000..ead7df6
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyDocument.js
@@ -0,0 +1,88 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyDocument(doc, options) {
+ var _a;
+ const lines = [];
+ let hasDirectives = options.directives === true;
+ if (options.directives !== false && doc.directives) {
+ const dir = doc.directives.toString(doc);
+ if (dir) {
+ lines.push(dir);
+ hasDirectives = true;
+ }
+ else if (doc.directives.docStart)
+ hasDirectives = true;
+ }
+ if (hasDirectives)
+ lines.push('---');
+ const ctx = stringify.createStringifyContext(doc, options);
+ const { commentString } = ctx.options;
+ if (doc.commentBefore) {
+ if (lines.length !== 1)
+ lines.unshift('');
+ const cs = commentString(doc.commentBefore);
+ lines.unshift(stringifyComment.indentComment(cs, ''));
+ }
+ let chompKeep = false;
+ let contentComment = null;
+ if (doc.contents) {
+ if (Node.isNode(doc.contents)) {
+ if (doc.contents.spaceBefore && hasDirectives)
+ lines.push('');
+ if (doc.contents.commentBefore) {
+ const cs = commentString(doc.contents.commentBefore);
+ lines.push(stringifyComment.indentComment(cs, ''));
+ }
+ // top-level block scalars need to be indented if followed by a comment
+ ctx.forceBlockIndent = !!doc.comment;
+ contentComment = doc.contents.comment;
+ }
+ const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
+ let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);
+ if (contentComment)
+ body += stringifyComment.lineComment(body, '', commentString(contentComment));
+ if ((body[0] === '|' || body[0] === '>') &&
+ lines[lines.length - 1] === '---') {
+ // Top-level block scalars with a preceding doc marker ought to use the
+ // same line for their header.
+ lines[lines.length - 1] = `--- ${body}`;
+ }
+ else
+ lines.push(body);
+ }
+ else {
+ lines.push(stringify.stringify(doc.contents, ctx));
+ }
+ if ((_a = doc.directives) === null || _a === void 0 ? void 0 : _a.docEnd) {
+ if (doc.comment) {
+ const cs = commentString(doc.comment);
+ if (cs.includes('\n')) {
+ lines.push('...');
+ lines.push(stringifyComment.indentComment(cs, ''));
+ }
+ else {
+ lines.push(`... ${cs}`);
+ }
+ }
+ else {
+ lines.push('...');
+ }
+ }
+ else {
+ let dc = doc.comment;
+ if (dc && chompKeep)
+ dc = dc.replace(/^\n+/, '');
+ if (dc) {
+ if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
+ lines.push('');
+ lines.push(stringifyComment.indentComment(commentString(dc), ''));
+ }
+ }
+ return lines.join('\n') + '\n';
+}
+
+exports.stringifyDocument = stringifyDocument;
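
The directives handling above is reachable from Document.toString(); for instance, directives: true forces the '---' marker even when no %-directives are present. A minimal sketch:

    const { parseDocument } = require('yaml');

    const doc = parseDocument('foo: bar\n');
    console.log(doc.toString({ directives: true }));
    // ---
    // foo: bar
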
diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.d.ts b/node_modules/yaml/dist/stringify/stringifyNumber.d.ts
new file mode 100644
index 0000000..3c14df1
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyNumber.d.ts
@@ -0,0 +1,2 @@
+import type { Scalar } from '../nodes/Scalar.js';
+export declare function stringifyNumber({ format, minFractionDigits, tag, value }: Scalar): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.js b/node_modules/yaml/dist/stringify/stringifyNumber.js
new file mode 100644
index 0000000..4118ff6
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyNumber.js
@@ -0,0 +1,26 @@
+'use strict';
+
+function stringifyNumber({ format, minFractionDigits, tag, value }) {
+ if (typeof value === 'bigint')
+ return String(value);
+ const num = typeof value === 'number' ? value : Number(value);
+ if (!isFinite(num))
+ return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
+ let n = JSON.stringify(value);
+ if (!format &&
+ minFractionDigits &&
+ (!tag || tag === 'tag:yaml.org,2002:float') &&
+ /^\d/.test(n)) {
+ let i = n.indexOf('.');
+ if (i < 0) {
+ i = n.length;
+ n += '.';
+ }
+ let d = minFractionDigits - (n.length - i - 1);
+ while (d-- > 0)
+ n += '0';
+ }
+ return n;
+}
+
+exports.stringifyNumber = stringifyNumber;
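
stringifyNumber is re-exported from 'yaml/util' (see util.js below). It only destructures the fields it uses, so a plain object can stand in for a Scalar when trying it out:

    const { stringifyNumber } = require('yaml/util');

    console.log(stringifyNumber({ value: NaN }));                        // .nan
    console.log(stringifyNumber({ value: -Infinity }));                  // -.inf
    console.log(stringifyNumber({ value: 0.5, minFractionDigits: 3 }));  // 0.500
    console.log(stringifyNumber({ value: 123n }));                       // 123
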
diff --git a/node_modules/yaml/dist/stringify/stringifyPair.d.ts b/node_modules/yaml/dist/stringify/stringifyPair.d.ts
new file mode 100644
index 0000000..c512149
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyPair.d.ts
@@ -0,0 +1,3 @@
+import type { Pair } from '../nodes/Pair.js';
+import { StringifyContext } from './stringify.js';
+export declare function stringifyPair({ key, value }: Readonly<Pair>, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyPair.js b/node_modules/yaml/dist/stringify/stringifyPair.js
new file mode 100644
index 0000000..f48a053
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyPair.js
@@ -0,0 +1,127 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
+ const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
+ let keyComment = (Node.isNode(key) && key.comment) || null;
+ if (simpleKeys) {
+ if (keyComment) {
+ throw new Error('With simple keys, key nodes cannot have comments');
+ }
+ if (Node.isCollection(key)) {
+ const msg = 'With simple keys, collection cannot be used as a key value';
+ throw new Error(msg);
+ }
+ }
+ let explicitKey = !simpleKeys &&
+ (!key ||
+ (keyComment && value == null && !ctx.inFlow) ||
+ Node.isCollection(key) ||
+ (Node.isScalar(key)
+ ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL
+ : typeof key === 'object'));
+ ctx = Object.assign({}, ctx, {
+ allNullValues: false,
+ implicitKey: !explicitKey && (simpleKeys || !allNullValues),
+ indent: indent + indentStep
+ });
+ let keyCommentDone = false;
+ let chompKeep = false;
+ let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
+ if (!explicitKey && !ctx.inFlow && str.length > 1024) {
+ if (simpleKeys)
+ throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
+ explicitKey = true;
+ }
+ if (ctx.inFlow) {
+ if (allNullValues || value == null) {
+ if (keyCommentDone && onComment)
+ onComment();
+ return str === '' ? '?' : explicitKey ? `? ${str}` : str;
+ }
+ }
+ else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
+ str = `? ${str}`;
+ if (keyComment && !keyCommentDone) {
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+ }
+ if (keyCommentDone)
+ keyComment = null;
+ if (explicitKey) {
+ if (keyComment)
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ str = `? ${str}\n${indent}:`;
+ }
+ else {
+ str = `${str}:`;
+ if (keyComment)
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ let vcb = '';
+ let valueComment = null;
+ if (Node.isNode(value)) {
+ if (value.spaceBefore)
+ vcb = '\n';
+ if (value.commentBefore) {
+ const cs = commentString(value.commentBefore);
+ vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`;
+ }
+ valueComment = value.comment;
+ }
+ else if (value && typeof value === 'object') {
+ value = doc.createNode(value);
+ }
+ ctx.implicitKey = false;
+ if (!explicitKey && !keyComment && Node.isScalar(value))
+ ctx.indentAtStart = str.length + 1;
+ chompKeep = false;
+ if (!indentSeq &&
+ indentStep.length >= 2 &&
+ !ctx.inFlow &&
+ !explicitKey &&
+ Node.isSeq(value) &&
+ !value.flow &&
+ !value.tag &&
+ !value.anchor) {
+ // If indentSeq === false, consider '- ' as part of indentation where possible
+ ctx.indent = ctx.indent.substr(2);
+ }
+ let valueCommentDone = false;
+ const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
+ let ws = ' ';
+ if (vcb || keyComment) {
+ if (valueStr === '' && !ctx.inFlow)
+ ws = vcb === '\n' ? '\n\n' : vcb;
+ else
+ ws = `${vcb}\n${ctx.indent}`;
+ }
+ else if (!explicitKey && Node.isCollection(value)) {
+ const flow = valueStr[0] === '[' || valueStr[0] === '{';
+ if (!flow || valueStr.includes('\n'))
+ ws = `\n${ctx.indent}`;
+ }
+ else if (valueStr === '' || valueStr[0] === '\n')
+ ws = '';
+ str += ws + valueStr;
+ if (ctx.inFlow) {
+ if (valueCommentDone && onComment)
+ onComment();
+ }
+ else if (valueComment && !valueCommentDone) {
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));
+ }
+ else if (chompKeep && onChompKeep) {
+ onChompKeep();
+ }
+ return str;
+}
+
+exports.stringifyPair = stringifyPair;
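
The indentSeq branch near the end (folding '- ' into the key's indentation) corresponds to the public indentSeq option. A sketch of the difference; the output comments are indicative.

    const YAML = require('yaml');

    console.log(YAML.stringify({ items: ['a', 'b'] }));
    // items:
    //   - a
    //   - b

    console.log(YAML.stringify({ items: ['a', 'b'] }, { indentSeq: false }));
    // items:
    // - a
    // - b
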
diff --git a/node_modules/yaml/dist/stringify/stringifyString.d.ts b/node_modules/yaml/dist/stringify/stringifyString.d.ts
new file mode 100644
index 0000000..a9904b9
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyString.d.ts
@@ -0,0 +1,3 @@
+import { Scalar } from '../nodes/Scalar.js';
+import type { StringifyContext } from './stringify.js';
+export declare function stringifyString(item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyString.js b/node_modules/yaml/dist/stringify/stringifyString.js
new file mode 100644
index 0000000..8a41503
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyString.js
@@ -0,0 +1,316 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+var foldFlowLines = require('./foldFlowLines.js');
+
+const getFoldOptions = (ctx) => ({
+ indentAtStart: ctx.indentAtStart,
+ lineWidth: ctx.options.lineWidth,
+ minContentWidth: ctx.options.minContentWidth
+});
+// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
+// presume that's starting a new document.
+const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
+function lineLengthOverLimit(str, lineWidth, indentLength) {
+ if (!lineWidth || lineWidth < 0)
+ return false;
+ const limit = lineWidth - indentLength;
+ const strLen = str.length;
+ if (strLen <= limit)
+ return false;
+ for (let i = 0, start = 0; i < strLen; ++i) {
+ if (str[i] === '\n') {
+ if (i - start > limit)
+ return true;
+ start = i + 1;
+ if (strLen - start <= limit)
+ return false;
+ }
+ }
+ return true;
+}
+function doubleQuotedString(value, ctx) {
+ const json = JSON.stringify(value);
+ if (ctx.options.doubleQuotedAsJSON)
+ return json;
+ const { implicitKey } = ctx;
+ const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ let str = '';
+ let start = 0;
+ for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
+ if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
+ // space before newline needs to be escaped to not be folded
+ str += json.slice(start, i) + '\\ ';
+ i += 1;
+ start = i;
+ ch = '\\';
+ }
+ if (ch === '\\')
+ switch (json[i + 1]) {
+ case 'u':
+ {
+ str += json.slice(start, i);
+ const code = json.substr(i + 2, 4);
+ switch (code) {
+ case '0000':
+ str += '\\0';
+ break;
+ case '0007':
+ str += '\\a';
+ break;
+ case '000b':
+ str += '\\v';
+ break;
+ case '001b':
+ str += '\\e';
+ break;
+ case '0085':
+ str += '\\N';
+ break;
+ case '00a0':
+ str += '\\_';
+ break;
+ case '2028':
+ str += '\\L';
+ break;
+ case '2029':
+ str += '\\P';
+ break;
+ default:
+ if (code.substr(0, 2) === '00')
+ str += '\\x' + code.substr(2);
+ else
+ str += json.substr(i, 6);
+ }
+ i += 5;
+ start = i + 1;
+ }
+ break;
+ case 'n':
+ if (implicitKey ||
+ json[i + 2] === '"' ||
+ json.length < minMultiLineLength) {
+ i += 1;
+ }
+ else {
+ // folding will eat first newline
+ str += json.slice(start, i) + '\n\n';
+ while (json[i + 2] === '\\' &&
+ json[i + 3] === 'n' &&
+ json[i + 4] !== '"') {
+ str += '\n';
+ i += 2;
+ }
+ str += indent;
+ // space after newline needs to be escaped to not be folded
+ if (json[i + 2] === ' ')
+ str += '\\';
+ i += 1;
+ start = i + 1;
+ }
+ break;
+ default:
+ i += 1;
+ }
+ }
+ str = start ? str + json.slice(start) : json;
+ return implicitKey
+ ? str
+ : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));
+}
+function singleQuotedString(value, ctx) {
+ if (ctx.options.singleQuote === false ||
+ (ctx.implicitKey && value.includes('\n')) ||
+ /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
+ )
+ return doubleQuotedString(value, ctx);
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
+ return ctx.implicitKey
+ ? res
+ : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+}
+function quotedString(value, ctx) {
+ const { singleQuote } = ctx.options;
+ let qs;
+ if (singleQuote === false)
+ qs = doubleQuotedString;
+ else {
+ const hasDouble = value.includes('"');
+ const hasSingle = value.includes("'");
+ if (hasDouble && !hasSingle)
+ qs = singleQuotedString;
+ else if (hasSingle && !hasDouble)
+ qs = doubleQuotedString;
+ else
+ qs = singleQuote ? singleQuotedString : doubleQuotedString;
+ }
+ return qs(value, ctx);
+}
+function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const { blockQuote, commentString, lineWidth } = ctx.options;
+ // 1. Block can't end in whitespace unless the last line is non-empty.
+ // 2. Strings consisting of only whitespace are best rendered explicitly.
+ if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
+ return quotedString(value, ctx);
+ }
+ const indent = ctx.indent ||
+ (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
+ const literal = blockQuote === 'literal'
+ ? true
+ : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED
+ ? false
+ : type === Scalar.Scalar.BLOCK_LITERAL
+ ? true
+ : !lineLengthOverLimit(value, lineWidth, indent.length);
+ if (!value)
+ return literal ? '|\n' : '>\n';
+ // determine chomping from whitespace at value end
+ let chomp;
+ let endStart;
+ for (endStart = value.length; endStart > 0; --endStart) {
+ const ch = value[endStart - 1];
+ if (ch !== '\n' && ch !== '\t' && ch !== ' ')
+ break;
+ }
+ let end = value.substring(endStart);
+ const endNlPos = end.indexOf('\n');
+ if (endNlPos === -1) {
+ chomp = '-'; // strip
+ }
+ else if (value === end || endNlPos !== end.length - 1) {
+ chomp = '+'; // keep
+ if (onChompKeep)
+ onChompKeep();
+ }
+ else {
+ chomp = ''; // clip
+ }
+ if (end) {
+ value = value.slice(0, -end.length);
+ if (end[end.length - 1] === '\n')
+ end = end.slice(0, -1);
+ end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
+ }
+ // determine indent indicator from whitespace at value start
+ let startWithSpace = false;
+ let startEnd;
+ let startNlPos = -1;
+ for (startEnd = 0; startEnd < value.length; ++startEnd) {
+ const ch = value[startEnd];
+ if (ch === ' ')
+ startWithSpace = true;
+ else if (ch === '\n')
+ startNlPos = startEnd;
+ else
+ break;
+ }
+ let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
+ if (start) {
+ value = value.substring(start.length);
+ start = start.replace(/\n+/g, `$&${indent}`);
+ }
+ const indentSize = indent ? '2' : '1'; // root is at -1
+ let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
+ if (comment) {
+ header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
+ if (onComment)
+ onComment();
+ }
+ if (literal) {
+ value = value.replace(/\n+/g, `$&${indent}`);
+ return `${header}\n${indent}${start}${value}${end}`;
+ }
+ value = value
+ .replace(/\n+/g, '\n$&')
+ .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
+ // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
+ .replace(/\n+/g, `$&${indent}`);
+ const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));
+ return `${header}\n${indent}${body}`;
+}
+function plainString(item, ctx, onComment, onChompKeep) {
+ const { type, value } = item;
+ const { actualString, implicitKey, indent, inFlow } = ctx;
+ if ((implicitKey && /[\n[\]{},]/.test(value)) ||
+ (inFlow && /[[\]{},]/.test(value))) {
+ return quotedString(value, ctx);
+ }
+ if (!value ||
+ /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
+ // not allowed:
+ // - empty string, '-' or '?'
+ // - start with an indicator character (except [?:-]) or /[?-] /
+ // - '\n ', ': ' or ' \n' anywhere
+ // - '#' not preceded by a non-space char
+ // - end with ' ' or ':'
+ return implicitKey || inFlow || !value.includes('\n')
+ ? quotedString(value, ctx)
+ : blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (!implicitKey &&
+ !inFlow &&
+ type !== Scalar.Scalar.PLAIN &&
+ value.includes('\n')) {
+ // Where allowed & type not set explicitly, prefer block style for multiline strings
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (indent === '' && containsDocumentMarker(value)) {
+ ctx.forceBlockIndent = true;
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ const str = value.replace(/\n+/g, `$&\n${indent}`);
+ // Verify that output will be parsed as a string, as e.g. plain numbers and
+ // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
+ // and others in v1.1.
+ if (actualString) {
+ const test = (tag) => { var _a; return tag.default && tag.tag !== 'tag:yaml.org,2002:str' && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(str)); };
+ const { compat, tags } = ctx.doc.schema;
+ if (tags.some(test) || (compat === null || compat === void 0 ? void 0 : compat.some(test)))
+ return quotedString(value, ctx);
+ }
+ return implicitKey
+ ? str
+ : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+}
+function stringifyString(item, ctx, onComment, onChompKeep) {
+ const { implicitKey, inFlow } = ctx;
+ const ss = typeof item.value === 'string'
+ ? item
+ : Object.assign({}, item, { value: String(item.value) });
+ let { type } = item;
+ if (type !== Scalar.Scalar.QUOTE_DOUBLE) {
+ // force double quotes on control characters & unpaired surrogates
+ if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
+ type = Scalar.Scalar.QUOTE_DOUBLE;
+ }
+ const _stringify = (_type) => {
+ switch (_type) {
+ case Scalar.Scalar.BLOCK_FOLDED:
+ case Scalar.Scalar.BLOCK_LITERAL:
+ return implicitKey || inFlow
+ ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
+ : blockString(ss, ctx, onComment, onChompKeep);
+ case Scalar.Scalar.QUOTE_DOUBLE:
+ return doubleQuotedString(ss.value, ctx);
+ case Scalar.Scalar.QUOTE_SINGLE:
+ return singleQuotedString(ss.value, ctx);
+ case Scalar.Scalar.PLAIN:
+ return plainString(ss, ctx, onComment, onChompKeep);
+ default:
+ return null;
+ }
+ };
+ let res = _stringify(type);
+ if (res === null) {
+ const { defaultKeyType, defaultStringType } = ctx.options;
+ const t = (implicitKey && defaultKeyType) || defaultStringType;
+ res = _stringify(t);
+ if (res === null)
+ throw new Error(`Unsupported default string type ${t}`);
+ }
+ return res;
+}
+
+exports.stringifyString = stringifyString;
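
Which branch a string takes (plain, quoted or block) follows from the scalar type plus options such as blockQuote, singleQuote and defaultStringType. A hedged sketch of the common multiline case; exact quoting may vary by version.

    const YAML = require('yaml');

    // Multiline strings normally go through blockString() ...
    console.log(YAML.stringify({ note: 'line one\nline two\n' }));
    // note: |
    //   line one
    //   line two

    // ... and fall back to quotedString() when block scalars are disabled.
    console.log(YAML.stringify({ note: 'line one\nline two\n' }, { blockQuote: false }));
    // note: "line one\nline two\n"
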
diff --git a/node_modules/yaml/dist/test-events.d.ts b/node_modules/yaml/dist/test-events.d.ts
new file mode 100644
index 0000000..d1a2348
--- /dev/null
+++ b/node_modules/yaml/dist/test-events.d.ts
@@ -0,0 +1,4 @@
+export declare function testEvents(src: string): {
+ events: string[];
+ error: unknown;
+};
diff --git a/node_modules/yaml/dist/test-events.js b/node_modules/yaml/dist/test-events.js
new file mode 100644
index 0000000..3927b30
--- /dev/null
+++ b/node_modules/yaml/dist/test-events.js
@@ -0,0 +1,135 @@
+'use strict';
+
+var Node = require('./nodes/Node.js');
+var publicApi = require('./public-api.js');
+var visit = require('./visit.js');
+
+const scalarChar = {
+ BLOCK_FOLDED: '>',
+ BLOCK_LITERAL: '|',
+ PLAIN: ':',
+ QUOTE_DOUBLE: '"',
+ QUOTE_SINGLE: "'"
+};
+function anchorExists(doc, anchor) {
+ let found = false;
+ visit.visit(doc, {
+ Value(_key, node) {
+ if (node.anchor === anchor) {
+ found = true;
+ return visit.visit.BREAK;
+ }
+ }
+ });
+ return found;
+}
+// test harness for yaml-test-suite event tests
+function testEvents(src) {
+ var _a;
+ const docs = publicApi.parseAllDocuments(src);
+ const errDoc = docs.find(doc => doc.errors.length > 0);
+ const error = errDoc ? errDoc.errors[0].message : null;
+ const events = ['+STR'];
+ try {
+ for (let i = 0; i < docs.length; ++i) {
+ const doc = docs[i];
+ let root = doc.contents;
+ if (Array.isArray(root))
+ root = root[0];
+ const [rootStart] = doc.range || [0];
+ const error = doc.errors[0];
+ if (error && (!error.pos || error.pos[0] < rootStart))
+ throw new Error();
+ let docStart = '+DOC';
+ if (doc.directives.docStart)
+ docStart += ' ---';
+ else if (doc.contents &&
+ doc.contents.range[2] === doc.contents.range[0] &&
+ !doc.contents.anchor &&
+ !doc.contents.tag)
+ continue;
+ events.push(docStart);
+ addEvents(events, doc, (_a = error === null || error === void 0 ? void 0 : error.pos[0]) !== null && _a !== void 0 ? _a : -1, root);
+ let docEnd = '-DOC';
+ if (doc.directives.docEnd)
+ docEnd += ' ...';
+ events.push(docEnd);
+ }
+ }
+ catch (e) {
+ return { events, error: error !== null && error !== void 0 ? error : e };
+ }
+ events.push('-STR');
+ return { events, error };
+}
+function addEvents(events, doc, errPos, node) {
+ if (!node) {
+ events.push('=VAL :');
+ return;
+ }
+ if (errPos !== -1 && Node.isNode(node) && node.range[0] >= errPos)
+ throw new Error();
+ let props = '';
+ let anchor = Node.isScalar(node) || Node.isCollection(node) ? node.anchor : undefined;
+ if (anchor) {
+ if (/\d$/.test(anchor)) {
+ const alt = anchor.replace(/\d$/, '');
+ if (anchorExists(doc, alt))
+ anchor = alt;
+ }
+ props = ` &${anchor}`;
+ }
+ if (Node.isNode(node) && node.tag)
+ props += ` <${node.tag}>`;
+ if (Node.isMap(node)) {
+ const ev = node.flow ? '+MAP {}' : '+MAP';
+ events.push(`${ev}${props}`);
+ node.items.forEach(({ key, value }) => {
+ addEvents(events, doc, errPos, key);
+ addEvents(events, doc, errPos, value);
+ });
+ events.push('-MAP');
+ }
+ else if (Node.isSeq(node)) {
+ const ev = node.flow ? '+SEQ []' : '+SEQ';
+ events.push(`${ev}${props}`);
+ node.items.forEach(item => {
+ addEvents(events, doc, errPos, item);
+ });
+ events.push('-SEQ');
+ }
+ else if (Node.isPair(node)) {
+ events.push(`+MAP${props}`);
+ addEvents(events, doc, errPos, node.key);
+ addEvents(events, doc, errPos, node.value);
+ events.push('-MAP');
+ }
+ else if (Node.isAlias(node)) {
+ let alias = node.source;
+ if (alias && /\d$/.test(alias)) {
+ const alt = alias.replace(/\d$/, '');
+ if (anchorExists(doc, alt))
+ alias = alt;
+ }
+ events.push(`=ALI${props} *${alias}`);
+ }
+ else {
+ const scalar = scalarChar[String(node.type)];
+ if (!scalar)
+ throw new Error(`Unexpected node type ${node.type}`);
+ const value = node.source
+ .replace(/\\/g, '\\\\')
+ .replace(/\0/g, '\\0')
+ .replace(/\x07/g, '\\a')
+ .replace(/\x08/g, '\\b')
+ .replace(/\t/g, '\\t')
+ .replace(/\n/g, '\\n')
+ .replace(/\v/g, '\\v')
+ .replace(/\f/g, '\\f')
+ .replace(/\r/g, '\\r')
+ .replace(/\x1b/g, '\\e');
+ events.push(`=VAL${props} ${scalar}${value}`);
+ }
+}
+
+exports.testEvents = testEvents;
diff --git a/node_modules/yaml/dist/util.d.ts b/node_modules/yaml/dist/util.d.ts
new file mode 100644
index 0000000..f1012d7
--- /dev/null
+++ b/node_modules/yaml/dist/util.d.ts
@@ -0,0 +1,9 @@
+export { debug, LogLevelId, warn } from './log.js';
+export { findPair } from './nodes/YAMLMap.js';
+export { toJS, ToJSContext } from './nodes/toJS.js';
+export { map as mapTag } from './schema/common/map.js';
+export { seq as seqTag } from './schema/common/seq.js';
+export { string as stringTag } from './schema/common/string.js';
+export { foldFlowLines } from './stringify/foldFlowLines';
+export { stringifyNumber } from './stringify/stringifyNumber.js';
+export { stringifyString } from './stringify/stringifyString.js';
diff --git a/node_modules/yaml/dist/util.js b/node_modules/yaml/dist/util.js
new file mode 100644
index 0000000..9a38c10
--- /dev/null
+++ b/node_modules/yaml/dist/util.js
@@ -0,0 +1,24 @@
+'use strict';
+
+var log = require('./log.js');
+var YAMLMap = require('./nodes/YAMLMap.js');
+var toJS = require('./nodes/toJS.js');
+var map = require('./schema/common/map.js');
+var seq = require('./schema/common/seq.js');
+var string = require('./schema/common/string.js');
+var foldFlowLines = require('./stringify/foldFlowLines.js');
+var stringifyNumber = require('./stringify/stringifyNumber.js');
+var stringifyString = require('./stringify/stringifyString.js');
+
+
+
+exports.debug = log.debug;
+exports.warn = log.warn;
+exports.findPair = YAMLMap.findPair;
+exports.toJS = toJS.toJS;
+exports.mapTag = map.map;
+exports.seqTag = seq.seq;
+exports.stringTag = string.string;
+exports.foldFlowLines = foldFlowLines.foldFlowLines;
+exports.stringifyNumber = stringifyNumber.stringifyNumber;
+exports.stringifyString = stringifyString.stringifyString;
diff --git a/node_modules/yaml/dist/visit.d.ts b/node_modules/yaml/dist/visit.d.ts
new file mode 100644
index 0000000..7926c00
--- /dev/null
+++ b/node_modules/yaml/dist/visit.d.ts
@@ -0,0 +1,102 @@
+import type { Document } from './doc/Document.js';
+import type { Alias } from './nodes/Alias.js';
+import { Node } from './nodes/Node.js';
+import type { Pair } from './nodes/Pair.js';
+import type { Scalar } from './nodes/Scalar.js';
+import type { YAMLMap } from './nodes/YAMLMap.js';
+import type { YAMLSeq } from './nodes/YAMLSeq.js';
+export declare type visitorFn<T> = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair;
+export declare type visitor = visitorFn<unknown> | {
+ Alias?: visitorFn<Alias>;
+ Collection?: visitorFn<YAMLMap | YAMLSeq>;
+ Map?: visitorFn<YAMLMap>;
+ Node?: visitorFn<Alias | Scalar | YAMLMap | YAMLSeq>;
+ Pair?: visitorFn<Pair>;
+ Scalar?: visitorFn<Scalar>;
+ Seq?: visitorFn<YAMLSeq>;
+ Value?: visitorFn<Scalar | YAMLMap | YAMLSeq>;
+};
+export declare type asyncVisitorFn<T> = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair | Promise<void | symbol | number | Node | Pair>;
+export declare type asyncVisitor = asyncVisitorFn<unknown> | {
+ Alias?: asyncVisitorFn<Alias>;
+ Collection?: asyncVisitorFn<YAMLMap | YAMLSeq>;
+ Map?: asyncVisitorFn<YAMLMap>;
+ Node?: asyncVisitorFn<Alias | Scalar | YAMLMap | YAMLSeq>;
+ Pair?: asyncVisitorFn<Pair>;
+ Scalar?: asyncVisitorFn<Scalar>;
+ Seq?: asyncVisitorFn<YAMLSeq>;
+ Value?: asyncVisitorFn<Scalar | YAMLMap | YAMLSeq>;
+};
+/**
+ * Apply a visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+export declare function visit(node: Node | Document | null, visitor: visitor): void;
+export declare namespace visit {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+}
+/**
+ * Apply an async visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `Promise`: Must resolve to one of the following values
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+export declare function visitAsync(node: Node | Document | null, visitor: asyncVisitor): Promise<void>;
+export declare namespace visitAsync {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+}
diff --git a/node_modules/yaml/dist/visit.js b/node_modules/yaml/dist/visit.js
new file mode 100644
index 0000000..aba95b8
--- /dev/null
+++ b/node_modules/yaml/dist/visit.js
@@ -0,0 +1,237 @@
+'use strict';
+
+var Node = require('./nodes/Node.js');
+
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove node');
+/**
+ * Apply a visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+function visit(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (Node.isDocument(node)) {
+ const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ visit_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visit.SKIP = SKIP;
+/** Remove the current node */
+visit.REMOVE = REMOVE;
+function visit_(key, node, visitor, path) {
+ const ctrl = callVisitor(key, node, visitor, path);
+ if (Node.isNode(ctrl) || Node.isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visit_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (Node.isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = visit_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (Node.isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = visit_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = visit_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+/**
+ * Apply an async visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `Promise`: Must resolve to one of the following values
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+async function visitAsync(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (Node.isDocument(node)) {
+ const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ await visitAsync_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visitAsync.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visitAsync.SKIP = SKIP;
+/** Remove the current node */
+visitAsync.REMOVE = REMOVE;
+async function visitAsync_(key, node, visitor, path) {
+ const ctrl = await callVisitor(key, node, visitor, path);
+ if (Node.isNode(ctrl) || Node.isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visitAsync_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (Node.isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = await visitAsync_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (Node.isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = await visitAsync_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = await visitAsync_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+function initVisitor(visitor) {
+ if (typeof visitor === 'object' &&
+ (visitor.Collection || visitor.Node || visitor.Value)) {
+ return Object.assign({
+ Alias: visitor.Node,
+ Map: visitor.Node,
+ Scalar: visitor.Node,
+ Seq: visitor.Node
+ }, visitor.Value && {
+ Map: visitor.Value,
+ Scalar: visitor.Value,
+ Seq: visitor.Value
+ }, visitor.Collection && {
+ Map: visitor.Collection,
+ Seq: visitor.Collection
+ }, visitor);
+ }
+ return visitor;
+}
+function callVisitor(key, node, visitor, path) {
+ var _a, _b, _c, _d, _e;
+ if (typeof visitor === 'function')
+ return visitor(key, node, path);
+ if (Node.isMap(node))
+ return (_a = visitor.Map) === null || _a === void 0 ? void 0 : _a.call(visitor, key, node, path);
+ if (Node.isSeq(node))
+ return (_b = visitor.Seq) === null || _b === void 0 ? void 0 : _b.call(visitor, key, node, path);
+ if (Node.isPair(node))
+ return (_c = visitor.Pair) === null || _c === void 0 ? void 0 : _c.call(visitor, key, node, path);
+ if (Node.isScalar(node))
+ return (_d = visitor.Scalar) === null || _d === void 0 ? void 0 : _d.call(visitor, key, node, path);
+ if (Node.isAlias(node))
+ return (_e = visitor.Alias) === null || _e === void 0 ? void 0 : _e.call(visitor, key, node, path);
+ return undefined;
+}
+function replaceNode(key, path, node) {
+ const parent = path[path.length - 1];
+ if (Node.isCollection(parent)) {
+ parent.items[key] = node;
+ }
+ else if (Node.isPair(parent)) {
+ if (key === 'key')
+ parent.key = node;
+ else
+ parent.value = node;
+ }
+ else if (Node.isDocument(parent)) {
+ parent.contents = node;
+ }
+ else {
+ const pt = Node.isAlias(parent) ? 'alias' : 'scalar';
+ throw new Error(`Cannot replace node with ${pt} parent`);
+ }
+}
+
+exports.visit = visit;
+exports.visitAsync = visitAsync;
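
A short usage sketch of the visitor API documented above, using the package's public exports; it rewrites every numeric scalar in place and re-serializes the document.

    const { parseDocument, visit } = require('yaml');

    const doc = parseDocument('a: 1\nb:\n  - 2\n  - 3\n');
    visit(doc, {
      Scalar(key, node) {
        if (typeof node.value === 'number') node.value *= 10;
      }
    });
    console.log(doc.toString());
    // a: 10
    // b:
    //   - 20
    //   - 30
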
diff --git a/node_modules/yaml/package.json b/node_modules/yaml/package.json
new file mode 100644
index 0000000..0fa9624
--- /dev/null
+++ b/node_modules/yaml/package.json
@@ -0,0 +1,94 @@
+{
+ "name": "yaml",
+ "version": "2.0.0",
+ "license": "ISC",
+ "author": "Eemeli Aro <eemeli@gmail.com>",
+ "repository": "github:eemeli/yaml",
+ "description": "JavaScript parser and stringifier for YAML",
+ "keywords": [
+ "YAML",
+ "parser",
+ "stringifier"
+ ],
+ "homepage": "https://eemeli.org/yaml/",
+ "files": [
+ "browser/",
+ "dist/",
+ "util.d.ts",
+ "util.js"
+ ],
+ "type": "commonjs",
+ "main": "./dist/index.js",
+ "browser": {
+ "./dist/index.js": "./browser/index.js",
+ "./dist/util.js": "./browser/dist/util.js",
+ "./util.js": "./browser/dist/util.js"
+ },
+ "exports": {
+ ".": {
+ "node": "./dist/index.js",
+ "default": "./browser/index.js"
+ },
+ "./package.json": "./package.json",
+ "./util": {
+ "node": "./dist/util.js",
+ "default": "./browser/dist/util.js"
+ }
+ },
+ "scripts": {
+ "build": "npm run build:node && npm run build:browser",
+ "build:browser": "rollup -c config/rollup.browser-config.js",
+ "build:node": "rollup -c config/rollup.node-config.js",
+ "clean": "git clean -fdxe node_modules",
+ "lint": "eslint src/",
+ "prettier": "prettier --write .",
+ "prestart": "npm run build:node",
+ "start": "node -i -e 'YAML=require(\"./dist/index.js\")'",
+ "test": "jest --config config/jest.config.js",
+ "test:all": "npm test && npm run test:types && npm run test:dist && npm run test:dist:types",
+ "test:browsers": "cd playground && npm test",
+ "test:dist": "npm run build:node && jest --config config/jest.config.js",
+ "test:dist:types": "tsc --allowJs --moduleResolution node --noEmit --target es5 dist/index.js",
+ "test:types": "tsc --noEmit",
+ "docs:install": "cd docs-slate && bundle install",
+ "docs:deploy": "cd docs-slate && ./deploy.sh",
+ "docs": "cd docs-slate && bundle exec middleman server",
+ "preversion": "npm test && npm run build",
+ "prepublishOnly": "npm run clean && npm test && npm run build"
+ },
+ "browserslist": "defaults, not ie 11",
+ "prettier": {
+ "arrowParens": "avoid",
+ "semi": false,
+ "singleQuote": true,
+ "trailingComma": "none"
+ },
+ "devDependencies": {
+ "@babel/core": "^7.12.10",
+ "@babel/plugin-proposal-class-properties": "^7.12.1",
+ "@babel/plugin-proposal-nullish-coalescing-operator": "^7.12.1",
+ "@babel/plugin-transform-typescript": "^7.12.17",
+ "@babel/preset-env": "^7.12.11",
+ "@rollup/plugin-babel": "^5.2.3",
+ "@rollup/plugin-replace": "^4.0.0",
+ "@rollup/plugin-typescript": "^8.1.1",
+ "@types/jest": "^27.0.1",
+ "@types/node": "^12.20.47",
+ "@typescript-eslint/eslint-plugin": "^5.3.1",
+ "@typescript-eslint/parser": "^5.3.1",
+ "babel-jest": "^27.0.1",
+ "cross-env": "^7.0.3",
+ "eslint": "^8.2.0",
+ "eslint-config-prettier": "^8.1.0",
+ "fast-check": "^2.12.0",
+ "jest": "^27.0.1",
+ "jest-ts-webcompat-resolver": "^1.0.0",
+ "prettier": "^2.2.1",
+ "rollup": "^2.38.2",
+ "tslib": "^2.1.0",
+ "typescript": "^4.3.5"
+ },
+ "engines": {
+ "node": ">= 14"
+ }
+}
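
With this exports map only the root, ./package.json and ./util subpaths resolve; everything else under dist/ is internal. Roughly how the specifiers map:

    // Node picks the "node" condition:
    const YAML = require('yaml');        // -> ./dist/index.js
    const util = require('yaml/util');   // -> ./dist/util.js

    // Bundlers targeting browsers pick the "default" condition instead:
    //   'yaml'      -> ./browser/index.js
    //   'yaml/util' -> ./browser/dist/util.js
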
diff --git a/node_modules/yaml/util.d.ts b/node_modules/yaml/util.d.ts
new file mode 100644
index 0000000..c656480
--- /dev/null
+++ b/node_modules/yaml/util.d.ts
@@ -0,0 +1,3 @@
+// Workaround for incomplete exports support in TypeScript
+// https://github.com/microsoft/TypeScript/issues/33079
+export * from './dist/util.js'
diff --git a/node_modules/yaml/util.js b/node_modules/yaml/util.js
new file mode 100644
index 0000000..070103f
--- /dev/null
+++ b/node_modules/yaml/util.js
@@ -0,0 +1,2 @@
+// Re-exporter for Node.js < 12.16.0
+module.exports = require('./dist/util.js')