author     Minteck <contact@minteck.org>  2022-06-04 08:51:01 +0200
committer  Minteck <contact@minteck.org>  2022-06-04 08:51:01 +0200
commit     383285ecd5292bf9a825e05904955b937de84cc9 (patch)
tree       0a53b6f02c1604b078044567c03dc1b6c944c8c2 /node_modules/yaml/dist
download   equestriadb-383285ecd5292bf9a825e05904955b937de84cc9.tar.gz
           equestriadb-383285ecd5292bf9a825e05904955b937de84cc9.tar.bz2
           equestriadb-383285ecd5292bf9a825e05904955b937de84cc9.zip
Initial commit
Diffstat (limited to 'node_modules/yaml/dist')
-rw-r--r--  node_modules/yaml/dist/compose/compose-collection.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/compose-collection.js  61
-rw-r--r--  node_modules/yaml/dist/compose/compose-doc.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/compose-doc.js  42
-rw-r--r--  node_modules/yaml/dist/compose/compose-node.d.ts  26
-rw-r--r--  node_modules/yaml/dist/compose/compose-node.js  93
-rw-r--r--  node_modules/yaml/dist/compose/compose-scalar.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/compose-scalar.js  83
-rw-r--r--  node_modules/yaml/dist/compose/composer.d.ts  62
-rw-r--r--  node_modules/yaml/dist/compose/composer.js  222
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-map.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-map.js  110
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-scalar.d.ts  10
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-scalar.js  196
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-seq.d.ts  5
-rw-r--r--  node_modules/yaml/dist/compose/resolve-block-seq.js  47
-rw-r--r--  node_modules/yaml/dist/compose/resolve-end.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-end.js  39
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-collection.d.ts  6
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-collection.js  203
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts  10
-rw-r--r--  node_modules/yaml/dist/compose/resolve-flow-scalar.js  226
-rw-r--r--  node_modules/yaml/dist/compose/resolve-props.d.ts  22
-rw-r--r--  node_modules/yaml/dist/compose/resolve-props.js  136
-rw-r--r--  node_modules/yaml/dist/compose/util-contains-newline.d.ts  2
-rw-r--r--  node_modules/yaml/dist/compose/util-contains-newline.js  36
-rw-r--r--  node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts  2
-rw-r--r--  node_modules/yaml/dist/compose/util-empty-scalar-position.js  29
-rw-r--r--  node_modules/yaml/dist/compose/util-flow-indent-check.d.ts  3
-rw-r--r--  node_modules/yaml/dist/compose/util-flow-indent-check.js  17
-rw-r--r--  node_modules/yaml/dist/compose/util-map-includes.d.ts  4
-rw-r--r--  node_modules/yaml/dist/compose/util-map-includes.js  19
-rw-r--r--  node_modules/yaml/dist/doc/Document.d.ts  139
-rw-r--r--  node_modules/yaml/dist/doc/Document.js  334
-rw-r--r--  node_modules/yaml/dist/doc/anchors.d.ts  24
-rw-r--r--  node_modules/yaml/dist/doc/anchors.js  77
-rw-r--r--  node_modules/yaml/dist/doc/applyReviver.d.ts  9
-rw-r--r--  node_modules/yaml/dist/doc/applyReviver.js  56
-rw-r--r--  node_modules/yaml/dist/doc/createNode.d.ts  17
-rw-r--r--  node_modules/yaml/dist/doc/createNode.js  89
-rw-r--r--  node_modules/yaml/dist/doc/directives.d.ts  49
-rw-r--r--  node_modules/yaml/dist/doc/directives.js  171
-rw-r--r--  node_modules/yaml/dist/errors.d.ts  21
-rw-r--r--  node_modules/yaml/dist/errors.js  62
-rw-r--r--  node_modules/yaml/dist/index.d.ts  19
-rw-r--r--  node_modules/yaml/dist/index.js  50
-rw-r--r--  node_modules/yaml/dist/log.d.ts  3
-rw-r--r--  node_modules/yaml/dist/log.js  17
-rw-r--r--  node_modules/yaml/dist/node_modules/tslib/tslib.es6.js  76
-rw-r--r--  node_modules/yaml/dist/nodes/Alias.d.ts  28
-rw-r--r--  node_modules/yaml/dist/nodes/Alias.js  96
-rw-r--r--  node_modules/yaml/dist/nodes/Collection.d.ts  73
-rw-r--r--  node_modules/yaml/dist/nodes/Collection.js  150
-rw-r--r--  node_modules/yaml/dist/nodes/Node.d.ts  53
-rw-r--r--  node_modules/yaml/dist/nodes/Node.js  66
-rw-r--r--  node_modules/yaml/dist/nodes/Pair.d.ts  21
-rw-r--r--  node_modules/yaml/dist/nodes/Pair.js  39
-rw-r--r--  node_modules/yaml/dist/nodes/Scalar.d.ts  42
-rw-r--r--  node_modules/yaml/dist/nodes/Scalar.js  26
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLMap.d.ts  43
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLMap.js  120
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLSeq.d.ts  55
-rw-r--r--  node_modules/yaml/dist/nodes/YAMLSeq.js  107
-rw-r--r--  node_modules/yaml/dist/nodes/addPairToJSMap.d.ts  3
-rw-r--r--  node_modules/yaml/dist/nodes/addPairToJSMap.js  106
-rw-r--r--  node_modules/yaml/dist/nodes/toJS.d.ts  30
-rw-r--r--  node_modules/yaml/dist/nodes/toJS.js  39
-rw-r--r--  node_modules/yaml/dist/options.d.ts  330
-rw-r--r--  node_modules/yaml/dist/parse/cst-scalar.d.ts  64
-rw-r--r--  node_modules/yaml/dist/parse/cst-scalar.js  219
-rw-r--r--  node_modules/yaml/dist/parse/cst-stringify.d.ts  8
-rw-r--r--  node_modules/yaml/dist/parse/cst-stringify.js  63
-rw-r--r--  node_modules/yaml/dist/parse/cst-visit.d.ts  39
-rw-r--r--  node_modules/yaml/dist/parse/cst-visit.js  99
-rw-r--r--  node_modules/yaml/dist/parse/cst.d.ts  106
-rw-r--r--  node_modules/yaml/dist/parse/cst.js  112
-rw-r--r--  node_modules/yaml/dist/parse/lexer.d.ts  87
-rw-r--r--  node_modules/yaml/dist/parse/lexer.js  704
-rw-r--r--  node_modules/yaml/dist/parse/line-counter.d.ts  22
-rw-r--r--  node_modules/yaml/dist/parse/line-counter.js  41
-rw-r--r--  node_modules/yaml/dist/parse/parser.d.ts  84
-rw-r--r--  node_modules/yaml/dist/parse/parser.js  958
-rw-r--r--  node_modules/yaml/dist/public-api.d.ts  43
-rw-r--r--  node_modules/yaml/dist/public-api.js  105
-rw-r--r--  node_modules/yaml/dist/schema/Schema.d.ts  18
-rw-r--r--  node_modules/yaml/dist/schema/Schema.js  40
-rw-r--r--  node_modules/yaml/dist/schema/common/map.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/map.js  44
-rw-r--r--  node_modules/yaml/dist/schema/common/null.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/common/null.js  17
-rw-r--r--  node_modules/yaml/dist/schema/common/seq.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/seq.js  35
-rw-r--r--  node_modules/yaml/dist/schema/common/string.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/common/string.js  16
-rw-r--r--  node_modules/yaml/dist/schema/core/bool.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/bool.js  21
-rw-r--r--  node_modules/yaml/dist/schema/core/float.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/float.js  47
-rw-r--r--  node_modules/yaml/dist/schema/core/int.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/core/int.js  42
-rw-r--r--  node_modules/yaml/dist/schema/core/schema.d.ts  1
-rw-r--r--  node_modules/yaml/dist/schema/core/schema.js  25
-rw-r--r--  node_modules/yaml/dist/schema/json/schema.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/json/schema.js  64
-rw-r--r--  node_modules/yaml/dist/schema/tags.d.ts  40
-rw-r--r--  node_modules/yaml/dist/schema/tags.js  86
-rw-r--r--  node_modules/yaml/dist/schema/types.d.ts  82
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts  2
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/binary.js  68
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts  7
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/bool.js  29
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/float.d.ts  4
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/float.js  50
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/int.d.ts  5
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/int.js  76
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts  21
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/omap.js  76
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts  10
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/pairs.js  82
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts  1
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/schema.js  39
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/set.d.ts  22
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/set.js  90
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts  6
-rw-r--r--  node_modules/yaml/dist/schema/yaml-1.1/timestamp.js  105
-rw-r--r--  node_modules/yaml/dist/stringify/foldFlowLines.d.ts  34
-rw-r--r--  node_modules/yaml/dist/stringify/foldFlowLines.js  140
-rw-r--r--  node_modules/yaml/dist/stringify/stringify.d.ts  20
-rw-r--r--  node_modules/yaml/dist/stringify/stringify.js  127
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyCollection.d.ts  17
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyCollection.js  154
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyComment.d.ts  10
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyComment.js  24
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyDocument.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyDocument.js  88
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyNumber.d.ts  2
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyNumber.js  26
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyPair.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyPair.js  127
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyString.d.ts  3
-rw-r--r--  node_modules/yaml/dist/stringify/stringifyString.js  316
-rw-r--r--  node_modules/yaml/dist/test-events.d.ts  4
-rw-r--r--  node_modules/yaml/dist/test-events.js  135
-rw-r--r--  node_modules/yaml/dist/util.d.ts  9
-rw-r--r--  node_modules/yaml/dist/util.js  24
-rw-r--r--  node_modules/yaml/dist/visit.d.ts  102
-rw-r--r--  node_modules/yaml/dist/visit.js  237
147 files changed, 10033 insertions, 0 deletions
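
The commit vendors the compiled dist output of the `yaml` package: lexer/parser, composer, document model, schemas, and stringifiers. For orientation only, the snippet below is a minimal usage sketch adapted from the JSDoc example in compose/composer.d.ts further down in this diff; the one-shot `parse` helper is assumed to come from public-api.js and is not part of the commit itself.

```ts
// Minimal sketch, not part of the commit: drives the vendored yaml dist
// the way its own composer.d.ts documents.
import { Composer, Parser, parse } from 'yaml'

const src = 'greeting: hello\nitems:\n  - 1\n  - 2\n'

// Streaming route: the Parser emits CST tokens, the Composer turns them
// into Document objects with errors and warnings attached.
const tokens = new Parser().parse(src)
for (const doc of new Composer().compose(tokens)) {
  console.log(doc.errors, doc.toJS()) // [] { greeting: 'hello', items: [ 1, 2 ] }
}

// One-shot convenience route (assumed to be exposed by public-api.js).
console.log(parse(src))
```
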
diff --git a/node_modules/yaml/dist/compose/compose-collection.d.ts b/node_modules/yaml/dist/compose/compose-collection.d.ts
new file mode 100644
index 0000000..7ba83ac
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-collection.d.ts
@@ -0,0 +1,5 @@
+import { ParsedNode } from '../nodes/Node.js';
+import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, tagToken: SourceToken | null, onError: ComposeErrorHandler): ParsedNode;
diff --git a/node_modules/yaml/dist/compose/compose-collection.js b/node_modules/yaml/dist/compose/compose-collection.js
new file mode 100644
index 0000000..52ad885
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-collection.js
@@ -0,0 +1,61 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var resolveBlockMap = require('./resolve-block-map.js');
+var resolveBlockSeq = require('./resolve-block-seq.js');
+var resolveFlowCollection = require('./resolve-flow-collection.js');
+
+function composeCollection(CN, ctx, token, tagToken, onError) {
+ let coll;
+ switch (token.type) {
+ case 'block-map': {
+ coll = resolveBlockMap.resolveBlockMap(CN, ctx, token, onError);
+ break;
+ }
+ case 'block-seq': {
+ coll = resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError);
+ break;
+ }
+ case 'flow-collection': {
+ coll = resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError);
+ break;
+ }
+ }
+ if (!tagToken)
+ return coll;
+ const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
+ if (!tagName)
+ return coll;
+ // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
+ const Coll = coll.constructor;
+ if (tagName === '!' || tagName === Coll.tagName) {
+ coll.tag = Coll.tagName;
+ return coll;
+ }
+ const expType = Node.isMap(coll) ? 'map' : 'seq';
+ let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
+ if (!tag) {
+ const kt = ctx.schema.knownTags[tagName];
+ if (kt && kt.collection === expType) {
+ ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
+ tag = kt;
+ }
+ else {
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
+ coll.tag = tagName;
+ return coll;
+ }
+ }
+ const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ const node = Node.isNode(res)
+ ? res
+ : new Scalar.Scalar(res);
+ node.range = coll.range;
+ node.tag = tagName;
+ if (tag === null || tag === void 0 ? void 0 : tag.format)
+ node.format = tag.format;
+ return node;
+}
+
+exports.composeCollection = composeCollection;
diff --git a/node_modules/yaml/dist/compose/compose-doc.d.ts b/node_modules/yaml/dist/compose/compose-doc.d.ts
new file mode 100644
index 0000000..2068f87
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-doc.d.ts
@@ -0,0 +1,6 @@
+import type { Directives } from '../doc/directives.js';
+import { Document } from '../doc/Document.js';
+import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
+import type * as CST from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeDoc(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed<import("../index.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/compose-doc.js b/node_modules/yaml/dist/compose/compose-doc.js
new file mode 100644
index 0000000..bb6453a
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-doc.js
@@ -0,0 +1,42 @@
+'use strict';
+
+var Document = require('../doc/Document.js');
+var composeNode = require('./compose-node.js');
+var resolveEnd = require('./resolve-end.js');
+var resolveProps = require('./resolve-props.js');
+
+function composeDoc(options, directives, { offset, start, value, end }, onError) {
+ const opts = Object.assign({ directives }, options);
+ const doc = new Document.Document(undefined, opts);
+ const ctx = {
+ atRoot: true,
+ directives: doc.directives,
+ options: doc.options,
+ schema: doc.schema
+ };
+ const props = resolveProps.resolveProps(start, {
+ indicator: 'doc-start',
+ next: value !== null && value !== void 0 ? value : end === null || end === void 0 ? void 0 : end[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ if (props.found) {
+ doc.directives.docStart = true;
+ if (value &&
+ (value.type === 'block-map' || value.type === 'block-seq') &&
+ !props.hasNewline)
+ onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
+ }
+ doc.contents = value
+ ? composeNode.composeNode(ctx, value, props, onError)
+ : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);
+ const contentEnd = doc.contents.range[2];
+ const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);
+ if (re.comment)
+ doc.comment = re.comment;
+ doc.range = [offset, contentEnd, re.offset];
+ return doc;
+}
+
+exports.composeDoc = composeDoc;
diff --git a/node_modules/yaml/dist/compose/compose-node.d.ts b/node_modules/yaml/dist/compose/compose-node.d.ts
new file mode 100644
index 0000000..a2f7810
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-node.d.ts
@@ -0,0 +1,26 @@
+import type { Directives } from '../doc/directives.js';
+import type { ParsedNode } from '../nodes/Node.js';
+import type { ParseOptions } from '../options.js';
+import type { SourceToken, Token } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { ComposeErrorHandler } from './composer.js';
+export interface ComposeContext {
+ atRoot: boolean;
+ directives: Directives;
+ options: Readonly<Required<Omit<ParseOptions, 'lineCounter'>>>;
+ schema: Readonly<Schema>;
+}
+interface Props {
+ spaceBefore: boolean;
+ comment: string;
+ anchor: SourceToken | null;
+ tag: SourceToken | null;
+}
+declare const CN: {
+ composeNode: typeof composeNode;
+ composeEmptyNode: typeof composeEmptyNode;
+};
+export declare type ComposeNode = typeof CN;
+export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode;
+export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag }: Props, onError: ComposeErrorHandler): import("../index.js").Scalar.Parsed;
+export {};
diff --git a/node_modules/yaml/dist/compose/compose-node.js b/node_modules/yaml/dist/compose/compose-node.js
new file mode 100644
index 0000000..df193ca
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-node.js
@@ -0,0 +1,93 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var composeCollection = require('./compose-collection.js');
+var composeScalar = require('./compose-scalar.js');
+var resolveEnd = require('./resolve-end.js');
+var utilEmptyScalarPosition = require('./util-empty-scalar-position.js');
+
+const CN = { composeNode, composeEmptyNode };
+function composeNode(ctx, token, props, onError) {
+ const { spaceBefore, comment, anchor, tag } = props;
+ let node;
+ let isSrcToken = true;
+ switch (token.type) {
+ case 'alias':
+ node = composeAlias(ctx, token, onError);
+ if (anchor || tag)
+ onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
+ break;
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'block-scalar':
+ node = composeScalar.composeScalar(ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ case 'block-map':
+ case 'block-seq':
+ case 'flow-collection':
+ node = composeCollection.composeCollection(CN, ctx, token, tag, onError);
+ if (anchor)
+ node.anchor = anchor.source.substring(1);
+ break;
+ default: {
+ const message = token.type === 'error'
+ ? token.message
+ : `Unsupported token (type: ${token.type})`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
+ isSrcToken = false;
+ }
+ }
+ if (anchor && node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment) {
+ if (token.type === 'scalar' && token.source === '')
+ node.comment = comment;
+ else
+ node.commentBefore = comment;
+ }
+ // @ts-expect-error Type checking misses meaning of isSrcToken
+ if (ctx.options.keepSourceTokens && isSrcToken)
+ node.srcToken = token;
+ return node;
+}
+function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag }, onError) {
+ const token = {
+ type: 'scalar',
+ offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),
+ indent: -1,
+ source: ''
+ };
+ const node = composeScalar.composeScalar(ctx, token, tag, onError);
+ if (anchor) {
+ node.anchor = anchor.source.substring(1);
+ if (node.anchor === '')
+ onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
+ }
+ if (spaceBefore)
+ node.spaceBefore = true;
+ if (comment)
+ node.comment = comment;
+ return node;
+}
+function composeAlias({ options }, { offset, source, end }, onError) {
+ const alias = new Alias.Alias(source.substring(1));
+ if (alias.source === '')
+ onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
+ if (alias.source.endsWith(':'))
+ onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
+ const valueEnd = offset + source.length;
+ const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);
+ alias.range = [offset, valueEnd, re.offset];
+ if (re.comment)
+ alias.comment = re.comment;
+ return alias;
+}
+
+exports.composeEmptyNode = composeEmptyNode;
+exports.composeNode = composeNode;
diff --git a/node_modules/yaml/dist/compose/compose-scalar.d.ts b/node_modules/yaml/dist/compose/compose-scalar.d.ts
new file mode 100644
index 0000000..d5d0f79
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-scalar.d.ts
@@ -0,0 +1,5 @@
+import { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst.js';
+import type { ComposeContext } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed;
diff --git a/node_modules/yaml/dist/compose/compose-scalar.js b/node_modules/yaml/dist/compose/compose-scalar.js
new file mode 100644
index 0000000..ad2963c
--- /dev/null
+++ b/node_modules/yaml/dist/compose/compose-scalar.js
@@ -0,0 +1,83 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var resolveBlockScalar = require('./resolve-block-scalar.js');
+var resolveFlowScalar = require('./resolve-flow-scalar.js');
+
+function composeScalar(ctx, token, tagToken, onError) {
+ const { value, type, comment, range } = token.type === 'block-scalar'
+ ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError)
+ : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);
+ const tagName = tagToken
+ ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
+ : null;
+ const tag = tagToken && tagName
+ ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
+ : token.type === 'scalar'
+ ? findScalarTagByTest(ctx, value, token, onError)
+ : ctx.schema[Node.SCALAR];
+ let scalar;
+ try {
+ const res = tag.resolve(value, msg => onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
+ scalar = Node.isScalar(res) ? res : new Scalar.Scalar(res);
+ }
+ catch (error) {
+ const msg = error instanceof Error ? error.message : String(error);
+ onError(tagToken !== null && tagToken !== void 0 ? tagToken : token, 'TAG_RESOLVE_FAILED', msg);
+ scalar = new Scalar.Scalar(value);
+ }
+ scalar.range = range;
+ scalar.source = value;
+ if (type)
+ scalar.type = type;
+ if (tagName)
+ scalar.tag = tagName;
+ if (tag.format)
+ scalar.format = tag.format;
+ if (comment)
+ scalar.comment = comment;
+ return scalar;
+}
+function findScalarTagByName(schema, value, tagName, tagToken, onError) {
+ var _a;
+ if (tagName === '!')
+ return schema[Node.SCALAR]; // non-specific tag
+ const matchWithTest = [];
+ for (const tag of schema.tags) {
+ if (!tag.collection && tag.tag === tagName) {
+ if (tag.default && tag.test)
+ matchWithTest.push(tag);
+ else
+ return tag;
+ }
+ }
+ for (const tag of matchWithTest)
+ if ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value))
+ return tag;
+ const kt = schema.knownTags[tagName];
+ if (kt && !kt.collection) {
+ // Ensure that the known tag is available for stringifying,
+ // but does not get used by default.
+ schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
+ return kt;
+ }
+ onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
+ return schema[Node.SCALAR];
+}
+function findScalarTagByTest({ directives, schema }, value, token, onError) {
+ var _a;
+ const tag = schema.tags.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); }) || schema[Node.SCALAR];
+ if (schema.compat) {
+ const compat = (_a = schema.compat.find(tag => { var _a; return tag.default && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(value)); })) !== null && _a !== void 0 ? _a : schema[Node.SCALAR];
+ if (tag.tag !== compat.tag) {
+ const ts = directives.tagString(tag.tag);
+ const cs = directives.tagString(compat.tag);
+ const msg = `Value may be parsed as either ${ts} or ${cs}`;
+ onError(token, 'TAG_RESOLVE_FAILED', msg, true);
+ }
+ }
+ return tag;
+}
+
+exports.composeScalar = composeScalar;
diff --git a/node_modules/yaml/dist/compose/composer.d.ts b/node_modules/yaml/dist/compose/composer.d.ts
new file mode 100644
index 0000000..9db2477
--- /dev/null
+++ b/node_modules/yaml/dist/compose/composer.d.ts
@@ -0,0 +1,62 @@
+import { Directives } from '../doc/directives.js';
+import { Document } from '../doc/Document.js';
+import { ErrorCode, YAMLParseError, YAMLWarning } from '../errors.js';
+import { Range } from '../nodes/Node.js';
+import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js';
+import type { Token } from '../parse/cst.js';
+declare type ErrorSource = number | [number, number] | Range | {
+ offset: number;
+ source?: string;
+};
+export declare type ComposeErrorHandler = (source: ErrorSource, code: ErrorCode, message: string, warning?: boolean) => void;
+/**
+ * Compose a stream of CST nodes into a stream of YAML Documents.
+ *
+ * ```ts
+ * import { Composer, Parser } from 'yaml'
+ *
+ * const src: string = ...
+ * const tokens = new Parser().parse(src)
+ * const docs = new Composer().compose(tokens)
+ * ```
+ */
+export declare class Composer {
+ private directives;
+ private doc;
+ private options;
+ private atDirectives;
+ private prelude;
+ private errors;
+ private warnings;
+ constructor(options?: ParseOptions & DocumentOptions & SchemaOptions);
+ private onError;
+ private decorate;
+ /**
+ * Current stream status information.
+ *
+ * Mostly useful at the end of input for an empty stream.
+ */
+ streamInfo(): {
+ comment: string;
+ directives: Directives;
+ errors: YAMLParseError[];
+ warnings: YAMLWarning[];
+ };
+ /**
+ * Compose tokens into documents.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ compose(tokens: Iterable<Token>, forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+ /** Advance the composer by one CST token. */
+ next(token: Token): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+ /**
+ * Call at end of input to yield any remaining document.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ end(forceDoc?: boolean, endOffset?: number): Generator<Document.Parsed<import("../nodes/Node.js").ParsedNode>, void, unknown>;
+}
+export {};
diff --git a/node_modules/yaml/dist/compose/composer.js b/node_modules/yaml/dist/compose/composer.js
new file mode 100644
index 0000000..2477a49
--- /dev/null
+++ b/node_modules/yaml/dist/compose/composer.js
@@ -0,0 +1,222 @@
+'use strict';
+
+var directives = require('../doc/directives.js');
+var Document = require('../doc/Document.js');
+var errors = require('../errors.js');
+var Node = require('../nodes/Node.js');
+var composeDoc = require('./compose-doc.js');
+var resolveEnd = require('./resolve-end.js');
+
+function getErrorPos(src) {
+ if (typeof src === 'number')
+ return [src, src + 1];
+ if (Array.isArray(src))
+ return src.length === 2 ? src : [src[0], src[1]];
+ const { offset, source } = src;
+ return [offset, offset + (typeof source === 'string' ? source.length : 1)];
+}
+function parsePrelude(prelude) {
+ var _a;
+ let comment = '';
+ let atComment = false;
+ let afterEmptyLine = false;
+ for (let i = 0; i < prelude.length; ++i) {
+ const source = prelude[i];
+ switch (source[0]) {
+ case '#':
+ comment +=
+ (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
+ (source.substring(1) || ' ');
+ atComment = true;
+ afterEmptyLine = false;
+ break;
+ case '%':
+ if (((_a = prelude[i + 1]) === null || _a === void 0 ? void 0 : _a[0]) !== '#')
+ i += 1;
+ atComment = false;
+ break;
+ default:
+ // This may be wrong after doc-end, but in that case it doesn't matter
+ if (!atComment)
+ afterEmptyLine = true;
+ atComment = false;
+ }
+ }
+ return { comment, afterEmptyLine };
+}
+/**
+ * Compose a stream of CST nodes into a stream of YAML Documents.
+ *
+ * ```ts
+ * import { Composer, Parser } from 'yaml'
+ *
+ * const src: string = ...
+ * const tokens = new Parser().parse(src)
+ * const docs = new Composer().compose(tokens)
+ * ```
+ */
+class Composer {
+ constructor(options = {}) {
+ this.doc = null;
+ this.atDirectives = false;
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ this.onError = (source, code, message, warning) => {
+ const pos = getErrorPos(source);
+ if (warning)
+ this.warnings.push(new errors.YAMLWarning(pos, code, message));
+ else
+ this.errors.push(new errors.YAMLParseError(pos, code, message));
+ };
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ this.directives = new directives.Directives({ version: options.version || '1.2' });
+ this.options = options;
+ }
+ decorate(doc, afterDoc) {
+ const { comment, afterEmptyLine } = parsePrelude(this.prelude);
+ //console.log({ dc: doc.comment, prelude, comment })
+ if (comment) {
+ const dc = doc.contents;
+ if (afterDoc) {
+ doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
+ }
+ else if (afterEmptyLine || doc.directives.docStart || !dc) {
+ doc.commentBefore = comment;
+ }
+ else if (Node.isCollection(dc) && !dc.flow && dc.items.length > 0) {
+ let it = dc.items[0];
+ if (Node.isPair(it))
+ it = it.key;
+ const cb = it.commentBefore;
+ it.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ else {
+ const cb = dc.commentBefore;
+ dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
+ }
+ }
+ if (afterDoc) {
+ Array.prototype.push.apply(doc.errors, this.errors);
+ Array.prototype.push.apply(doc.warnings, this.warnings);
+ }
+ else {
+ doc.errors = this.errors;
+ doc.warnings = this.warnings;
+ }
+ this.prelude = [];
+ this.errors = [];
+ this.warnings = [];
+ }
+ /**
+ * Current stream status information.
+ *
+ * Mostly useful at the end of input for an empty stream.
+ */
+ streamInfo() {
+ return {
+ comment: parsePrelude(this.prelude).comment,
+ directives: this.directives,
+ errors: this.errors,
+ warnings: this.warnings
+ };
+ }
+ /**
+ * Compose tokens into documents.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *compose(tokens, forceDoc = false, endOffset = -1) {
+ for (const token of tokens)
+ yield* this.next(token);
+ yield* this.end(forceDoc, endOffset);
+ }
+ /** Advance the composer by one CST token. */
+ *next(token) {
+ if (process.env.LOG_STREAM)
+ console.dir(token, { depth: null });
+ switch (token.type) {
+ case 'directive':
+ this.directives.add(token.source, (offset, message, warning) => {
+ const pos = getErrorPos(token);
+ pos[0] += offset;
+ this.onError(pos, 'BAD_DIRECTIVE', message, warning);
+ });
+ this.prelude.push(token.source);
+ this.atDirectives = true;
+ break;
+ case 'document': {
+ const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);
+ if (this.atDirectives && !doc.directives.docStart)
+ this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
+ this.decorate(doc, false);
+ if (this.doc)
+ yield this.doc;
+ this.doc = doc;
+ this.atDirectives = false;
+ break;
+ }
+ case 'byte-order-mark':
+ case 'space':
+ break;
+ case 'comment':
+ case 'newline':
+ this.prelude.push(token.source);
+ break;
+ case 'error': {
+ const msg = token.source
+ ? `${token.message}: ${JSON.stringify(token.source)}`
+ : token.message;
+ const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
+ if (this.atDirectives || !this.doc)
+ this.errors.push(error);
+ else
+ this.doc.errors.push(error);
+ break;
+ }
+ case 'doc-end': {
+ if (!this.doc) {
+ const msg = 'Unexpected doc-end without preceding document';
+ this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
+ break;
+ }
+ this.doc.directives.docEnd = true;
+ const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
+ this.decorate(this.doc, true);
+ if (end.comment) {
+ const dc = this.doc.comment;
+ this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
+ }
+ this.doc.range[2] = end.offset;
+ break;
+ }
+ default:
+ this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
+ }
+ }
+ /**
+ * Call at end of input to yield any remaining document.
+ *
+ * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
+ * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
+ */
+ *end(forceDoc = false, endOffset = -1) {
+ if (this.doc) {
+ this.decorate(this.doc, true);
+ yield this.doc;
+ this.doc = null;
+ }
+ else if (forceDoc) {
+ const opts = Object.assign({ directives: this.directives }, this.options);
+ const doc = new Document.Document(undefined, opts);
+ if (this.atDirectives)
+ this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
+ doc.range = [0, endOffset, endOffset];
+ this.decorate(doc, false);
+ yield doc;
+ }
+ }
+}
+
+exports.Composer = Composer;
diff --git a/node_modules/yaml/dist/compose/resolve-block-map.d.ts b/node_modules/yaml/dist/compose/resolve-block-map.d.ts
new file mode 100644
index 0000000..dbcd3fa
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-map.d.ts
@@ -0,0 +1,6 @@
+import type { ParsedNode } from '../nodes/Node.js';
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import type { BlockMap } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler): YAMLMap.Parsed<ParsedNode, ParsedNode | null>;
diff --git a/node_modules/yaml/dist/compose/resolve-block-map.js b/node_modules/yaml/dist/compose/resolve-block-map.js
new file mode 100644
index 0000000..142848b
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-map.js
@@ -0,0 +1,110 @@
+'use strict';
+
+var Pair = require('../nodes/Pair.js');
+var YAMLMap = require('../nodes/YAMLMap.js');
+var resolveProps = require('./resolve-props.js');
+var utilContainsNewline = require('./util-contains-newline.js');
+var utilFlowIndentCheck = require('./util-flow-indent-check.js');
+var utilMapIncludes = require('./util-map-includes.js');
+
+const startColMsg = 'All mapping items must start at the same column';
+function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
+ var _a;
+ const map = new YAMLMap.YAMLMap(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bm.offset;
+ for (const collItem of bm.items) {
+ const { start, key, sep, value } = collItem;
+ // key properties
+ const keyProps = resolveProps.resolveProps(start, {
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ const implicitKey = !keyProps.found;
+ if (implicitKey) {
+ if (key) {
+ if (key.type === 'block-seq')
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
+ else if ('indent' in key && key.indent !== bm.indent)
+ onError(offset, 'BAD_INDENT', startColMsg);
+ }
+ if (!keyProps.anchor && !keyProps.tag && !sep) {
+ // TODO: assert being at last item?
+ if (keyProps.comment) {
+ if (map.comment)
+ map.comment += '\n' + keyProps.comment;
+ else
+ map.comment = keyProps.comment;
+ }
+ continue;
+ }
+ if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) {
+ onError(key !== null && key !== void 0 ? key : start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
+ }
+ }
+ else if (((_a = keyProps.found) === null || _a === void 0 ? void 0 : _a.indent) !== bm.indent) {
+ onError(offset, 'BAD_INDENT', startColMsg);
+ }
+ // key value
+ const keyStart = keyProps.end;
+ const keyNode = key
+ ? composeNode(ctx, key, keyProps, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);
+ if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ // value properties
+ const valueProps = resolveProps.resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: !key || key.type === 'block-scalar'
+ });
+ offset = valueProps.end;
+ if (valueProps.found) {
+ if (implicitKey) {
+ if ((value === null || value === void 0 ? void 0 : value.type) === 'block-map' && !valueProps.hasNewline)
+ onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
+ if (ctx.options.strict &&
+ keyProps.start < valueProps.found.offset - 1024)
+ onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
+ }
+ // value value
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);
+ offset = valueNode.range[2];
+ const pair = new Pair.Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ else {
+ // key with no value
+ if (implicitKey)
+ onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
+ if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair.Pair(keyNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ map.items.push(pair);
+ }
+ }
+ map.range = [bm.offset, offset, offset];
+ return map;
+}
+
+exports.resolveBlockMap = resolveBlockMap;
diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
new file mode 100644
index 0000000..4855b19
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts
@@ -0,0 +1,10 @@
+import { Range } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockScalar(scalar: BlockScalar, strict: boolean, onError: ComposeErrorHandler): {
+ value: string;
+ type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null;
+ comment: string;
+ range: Range;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-block-scalar.js b/node_modules/yaml/dist/compose/resolve-block-scalar.js
new file mode 100644
index 0000000..b5a350d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-scalar.js
@@ -0,0 +1,196 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+
+function resolveBlockScalar(scalar, strict, onError) {
+ const start = scalar.offset;
+ const header = parseBlockScalarHeader(scalar, strict, onError);
+ if (!header)
+ return { value: '', type: null, comment: '', range: [start, start, start] };
+ const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;
+ const lines = scalar.source ? splitLines(scalar.source) : [];
+ // determine the end of content & start of chomping
+ let chompStart = lines.length;
+ for (let i = lines.length - 1; i >= 0; --i) {
+ const content = lines[i][1];
+ if (content === '' || content === '\r')
+ chompStart = i;
+ else
+ break;
+ }
+ // shortcut for empty contents
+ if (chompStart === 0) {
+ const value = header.chomp === '+' && lines.length > 0
+ ? '\n'.repeat(Math.max(1, lines.length - 1))
+ : '';
+ let end = start + header.length;
+ if (scalar.source)
+ end += scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+ }
+ // find the indentation level to trim from start
+ let trimIndent = scalar.indent + header.indent;
+ let offset = scalar.offset + header.length;
+ let contentStart = 0;
+ for (let i = 0; i < chompStart; ++i) {
+ const [indent, content] = lines[i];
+ if (content === '' || content === '\r') {
+ if (header.indent === 0 && indent.length > trimIndent)
+ trimIndent = indent.length;
+ }
+ else {
+ if (indent.length < trimIndent) {
+ const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
+ onError(offset + indent.length, 'MISSING_CHAR', message);
+ }
+ if (header.indent === 0)
+ trimIndent = indent.length;
+ contentStart = i;
+ break;
+ }
+ offset += indent.length + content.length + 1;
+ }
+ // include trailing more-indented empty lines in content
+ for (let i = lines.length - 1; i >= chompStart; --i) {
+ if (lines[i][0].length > trimIndent)
+ chompStart = i + 1;
+ }
+ let value = '';
+ let sep = '';
+ let prevMoreIndented = false;
+ // leading whitespace is kept intact
+ for (let i = 0; i < contentStart; ++i)
+ value += lines[i][0].slice(trimIndent) + '\n';
+ for (let i = contentStart; i < chompStart; ++i) {
+ let [indent, content] = lines[i];
+ offset += indent.length + content.length + 1;
+ const crlf = content[content.length - 1] === '\r';
+ if (crlf)
+ content = content.slice(0, -1);
+ /* istanbul ignore if already caught in lexer */
+ if (content && indent.length < trimIndent) {
+ const src = header.indent
+ ? 'explicit indentation indicator'
+ : 'first line';
+ const message = `Block scalar lines must not be less indented than their ${src}`;
+ onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
+ indent = '';
+ }
+ if (type === Scalar.Scalar.BLOCK_LITERAL) {
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ }
+ else if (indent.length > trimIndent || content[0] === '\t') {
+ // more-indented content within a folded block
+ if (sep === ' ')
+ sep = '\n';
+ else if (!prevMoreIndented && sep === '\n')
+ sep = '\n\n';
+ value += sep + indent.slice(trimIndent) + content;
+ sep = '\n';
+ prevMoreIndented = true;
+ }
+ else if (content === '') {
+ // empty line
+ if (sep === '\n')
+ value += '\n';
+ else
+ sep = '\n';
+ }
+ else {
+ value += sep + content;
+ sep = ' ';
+ prevMoreIndented = false;
+ }
+ }
+ switch (header.chomp) {
+ case '-':
+ break;
+ case '+':
+ for (let i = chompStart; i < lines.length; ++i)
+ value += '\n' + lines[i][0].slice(trimIndent);
+ if (value[value.length - 1] !== '\n')
+ value += '\n';
+ break;
+ default:
+ value += '\n';
+ }
+ const end = start + header.length + scalar.source.length;
+ return { value, type, comment: header.comment, range: [start, end, end] };
+}
+function parseBlockScalarHeader({ offset, props }, strict, onError) {
+ /* istanbul ignore if should not happen */
+ if (props[0].type !== 'block-scalar-header') {
+ onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
+ return null;
+ }
+ const { source } = props[0];
+ const mode = source[0];
+ let indent = 0;
+ let chomp = '';
+ let error = -1;
+ for (let i = 1; i < source.length; ++i) {
+ const ch = source[i];
+ if (!chomp && (ch === '-' || ch === '+'))
+ chomp = ch;
+ else {
+ const n = Number(ch);
+ if (!indent && n)
+ indent = n;
+ else if (error === -1)
+ error = offset + i;
+ }
+ }
+ if (error !== -1)
+ onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
+ let hasSpace = false;
+ let comment = '';
+ let length = source.length;
+ for (let i = 1; i < props.length; ++i) {
+ const token = props[i];
+ switch (token.type) {
+ case 'space':
+ hasSpace = true;
+ // fallthrough
+ case 'newline':
+ length += token.source.length;
+ break;
+ case 'comment':
+ if (strict && !hasSpace) {
+ const message = 'Comments must be separated from other tokens by white space characters';
+ onError(token, 'MISSING_CHAR', message);
+ }
+ length += token.source.length;
+ comment = token.source.substring(1);
+ break;
+ case 'error':
+ onError(token, 'UNEXPECTED_TOKEN', token.message);
+ length += token.source.length;
+ break;
+ /* istanbul ignore next should not happen */
+ default: {
+ const message = `Unexpected token in block scalar header: ${token.type}`;
+ onError(token, 'UNEXPECTED_TOKEN', message);
+ const ts = token.source;
+ if (ts && typeof ts === 'string')
+ length += ts.length;
+ }
+ }
+ }
+ return { mode, indent, chomp, comment, length };
+}
+/** @returns Array of lines split up as `[indent, content]` */
+function splitLines(source) {
+ const split = source.split(/\n( *)/);
+ const first = split[0];
+ const m = first.match(/^( *)/);
+ const line0 = (m === null || m === void 0 ? void 0 : m[1])
+ ? [m[1], first.slice(m[1].length)]
+ : ['', first];
+ const lines = [line0];
+ for (let i = 1; i < split.length; i += 2)
+ lines.push([split[i], split[i + 1]]);
+ return lines;
+}
+
+exports.resolveBlockScalar = resolveBlockScalar;
diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.d.ts b/node_modules/yaml/dist/compose/resolve-block-seq.d.ts
new file mode 100644
index 0000000..25ed558
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-seq.d.ts
@@ -0,0 +1,5 @@
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { BlockSequence } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler): YAMLSeq.Parsed<import("../index.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/resolve-block-seq.js b/node_modules/yaml/dist/compose/resolve-block-seq.js
new file mode 100644
index 0000000..1e7e86a
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-block-seq.js
@@ -0,0 +1,47 @@
+'use strict';
+
+var YAMLSeq = require('../nodes/YAMLSeq.js');
+var resolveProps = require('./resolve-props.js');
+var utilFlowIndentCheck = require('./util-flow-indent-check.js');
+
+function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
+ const seq = new YAMLSeq.YAMLSeq(ctx.schema);
+ if (ctx.atRoot)
+ ctx.atRoot = false;
+ let offset = bs.offset;
+ for (const { start, value } of bs.items) {
+ const props = resolveProps.resolveProps(start, {
+ indicator: 'seq-item-ind',
+ next: value,
+ offset,
+ onError,
+ startOnNewline: true
+ });
+ offset = props.end;
+ if (!props.found) {
+ if (props.anchor || props.tag || value) {
+ if (value && value.type === 'block-seq')
+ onError(offset, 'BAD_INDENT', 'All sequence items must start at the same column');
+ else
+ onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
+ }
+ else {
+ // TODO: assert being at last item?
+ if (props.comment)
+ seq.comment = props.comment;
+ continue;
+ }
+ }
+ const node = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, offset, start, null, props, onError);
+ if (ctx.schema.compat)
+ utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);
+ offset = node.range[2];
+ seq.items.push(node);
+ }
+ seq.range = [bs.offset, offset, offset];
+ return seq;
+}
+
+exports.resolveBlockSeq = resolveBlockSeq;
diff --git a/node_modules/yaml/dist/compose/resolve-end.d.ts b/node_modules/yaml/dist/compose/resolve-end.d.ts
new file mode 100644
index 0000000..bb2d0b8
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-end.d.ts
@@ -0,0 +1,6 @@
+import type { SourceToken } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): {
+ comment: string;
+ offset: number;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-end.js b/node_modules/yaml/dist/compose/resolve-end.js
new file mode 100644
index 0000000..3a58347
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-end.js
@@ -0,0 +1,39 @@
+'use strict';
+
+function resolveEnd(end, offset, reqSpace, onError) {
+ let comment = '';
+ if (end) {
+ let hasSpace = false;
+ let sep = '';
+ for (const token of end) {
+ const { source, type } = token;
+ switch (type) {
+ case 'space':
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (reqSpace && !hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += sep + cb;
+ sep = '';
+ break;
+ }
+ case 'newline':
+ if (comment)
+ sep += source;
+ hasSpace = true;
+ break;
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
+ }
+ offset += source.length;
+ }
+ }
+ return { comment, offset };
+}
+
+exports.resolveEnd = resolveEnd;
diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts b/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
new file mode 100644
index 0000000..06d32f5
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts
@@ -0,0 +1,6 @@
+import { YAMLMap } from '../nodes/YAMLMap.js';
+import { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { FlowCollection } from '../parse/cst.js';
+import type { ComposeContext, ComposeNode } from './compose-node.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler): YAMLMap.Parsed<import("../nodes/Node.js").ParsedNode, import("../nodes/Node.js").ParsedNode | null> | YAMLSeq.Parsed<import("../nodes/Node.js").ParsedNode>;
diff --git a/node_modules/yaml/dist/compose/resolve-flow-collection.js b/node_modules/yaml/dist/compose/resolve-flow-collection.js
new file mode 100644
index 0000000..cf848d9
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-collection.js
@@ -0,0 +1,203 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Pair = require('../nodes/Pair.js');
+var YAMLMap = require('../nodes/YAMLMap.js');
+var YAMLSeq = require('../nodes/YAMLSeq.js');
+var resolveEnd = require('./resolve-end.js');
+var resolveProps = require('./resolve-props.js');
+var utilContainsNewline = require('./util-contains-newline.js');
+var utilMapIncludes = require('./util-map-includes.js');
+
+const blockMsg = 'Block collections are not allowed within flow collections';
+const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
+function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
+ var _a;
+ const isMap = fc.start.source === '{';
+ const fcName = isMap ? 'flow map' : 'flow sequence';
+ const coll = isMap
+ ? new YAMLMap.YAMLMap(ctx.schema)
+ : new YAMLSeq.YAMLSeq(ctx.schema);
+ coll.flow = true;
+ const atRoot = ctx.atRoot;
+ if (atRoot)
+ ctx.atRoot = false;
+ let offset = fc.offset + fc.start.source.length;
+ for (let i = 0; i < fc.items.length; ++i) {
+ const collItem = fc.items[i];
+ const { start, key, sep, value } = collItem;
+ const props = resolveProps.resolveProps(start, {
+ flow: fcName,
+ indicator: 'explicit-key-ind',
+ next: key !== null && key !== void 0 ? key : sep === null || sep === void 0 ? void 0 : sep[0],
+ offset,
+ onError,
+ startOnNewline: false
+ });
+ if (!props.found) {
+ if (!props.anchor && !props.tag && !sep && !value) {
+ if (i === 0 && props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ else if (i < fc.items.length - 1)
+ onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
+ if (props.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + props.comment;
+ else
+ coll.comment = props.comment;
+ }
+ offset = props.end;
+ continue;
+ }
+ if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))
+ onError(key, // checked by containsNewline()
+ 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ }
+ if (i === 0) {
+ if (props.comma)
+ onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
+ }
+ else {
+ if (!props.comma)
+ onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
+ if (props.comment) {
+ let prevItemComment = '';
+ loop: for (const st of start) {
+ switch (st.type) {
+ case 'comma':
+ case 'space':
+ break;
+ case 'comment':
+ prevItemComment = st.source.substring(1);
+ break loop;
+ default:
+ break loop;
+ }
+ }
+ if (prevItemComment) {
+ let prev = coll.items[coll.items.length - 1];
+ if (Node.isPair(prev))
+ prev = (_a = prev.value) !== null && _a !== void 0 ? _a : prev.key;
+ if (prev.comment)
+ prev.comment += '\n' + prevItemComment;
+ else
+ prev.comment = prevItemComment;
+ props.comment = props.comment.substring(prevItemComment.length + 1);
+ }
+ }
+ }
+ if (!isMap && !sep && !props.found) {
+ // item is a value in a seq
+ // → key & sep are empty, start does not include ? or :
+ const valueNode = value
+ ? composeNode(ctx, value, props, onError)
+ : composeEmptyNode(ctx, props.end, sep, null, props, onError);
+ coll.items.push(valueNode);
+ offset = valueNode.range[2];
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else {
+ // item is a key+value pair
+ // key value
+ const keyStart = props.end;
+ const keyNode = key
+ ? composeNode(ctx, key, props, onError)
+ : composeEmptyNode(ctx, keyStart, start, null, props, onError);
+ if (isBlock(key))
+ onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ // value properties
+ const valueProps = resolveProps.resolveProps(sep !== null && sep !== void 0 ? sep : [], {
+ flow: fcName,
+ indicator: 'map-value-ind',
+ next: value,
+ offset: keyNode.range[2],
+ onError,
+ startOnNewline: false
+ });
+ if (valueProps.found) {
+ if (!isMap && !props.found && ctx.options.strict) {
+ if (sep)
+ for (const st of sep) {
+ if (st === valueProps.found)
+ break;
+ if (st.type === 'newline') {
+ onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
+ break;
+ }
+ }
+ if (props.start < valueProps.found.offset - 1024)
+ onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
+ }
+ }
+ else if (value) {
+ if ('source' in value && value.source && value.source[0] === ':')
+ onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
+ else
+ onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
+ }
+ // value value
+ const valueNode = value
+ ? composeNode(ctx, value, valueProps, onError)
+ : valueProps.found
+ ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
+ : null;
+ if (valueNode) {
+ if (isBlock(value))
+ onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
+ }
+ else if (valueProps.comment) {
+ if (keyNode.comment)
+ keyNode.comment += '\n' + valueProps.comment;
+ else
+ keyNode.comment = valueProps.comment;
+ }
+ const pair = new Pair.Pair(keyNode, valueNode);
+ if (ctx.options.keepSourceTokens)
+ pair.srcToken = collItem;
+ if (isMap) {
+ const map = coll;
+ if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))
+ onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
+ map.items.push(pair);
+ }
+ else {
+ const map = new YAMLMap.YAMLMap(ctx.schema);
+ map.flow = true;
+ map.items.push(pair);
+ coll.items.push(map);
+ }
+ offset = valueNode ? valueNode.range[2] : valueProps.end;
+ }
+ }
+ const expectedEnd = isMap ? '}' : ']';
+ const [ce, ...ee] = fc.end;
+ let cePos = offset;
+ if (ce && ce.source === expectedEnd)
+ cePos = ce.offset + ce.source.length;
+ else {
+ const name = fcName[0].toUpperCase() + fcName.substring(1);
+ const msg = atRoot
+ ? `${name} must end with a ${expectedEnd}`
+ : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
+ onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
+ if (ce && ce.source.length !== 1)
+ ee.unshift(ce);
+ }
+ if (ee.length > 0) {
+ const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);
+ if (end.comment) {
+ if (coll.comment)
+ coll.comment += '\n' + end.comment;
+ else
+ coll.comment = end.comment;
+ }
+ coll.range = [fc.offset, cePos, end.offset];
+ }
+ else {
+ coll.range = [fc.offset, cePos, cePos];
+ }
+ return coll;
+}
+
+exports.resolveFlowCollection = resolveFlowCollection;
diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts b/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
new file mode 100644
index 0000000..0c9204d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts
@@ -0,0 +1,10 @@
+import { Range } from '../nodes/Node.js';
+import { Scalar } from '../nodes/Scalar.js';
+import type { FlowScalar } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): {
+ value: string;
+ type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null;
+ comment: string;
+ range: Range;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-flow-scalar.js b/node_modules/yaml/dist/compose/resolve-flow-scalar.js
new file mode 100644
index 0000000..f7bf4e5
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-flow-scalar.js
@@ -0,0 +1,226 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+var resolveEnd = require('./resolve-end.js');
+
+function resolveFlowScalar(scalar, strict, onError) {
+ const { offset, type, source, end } = scalar;
+ let _type;
+ let value;
+ const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
+ switch (type) {
+ case 'scalar':
+ _type = Scalar.Scalar.PLAIN;
+ value = plainValue(source, _onError);
+ break;
+ case 'single-quoted-scalar':
+ _type = Scalar.Scalar.QUOTE_SINGLE;
+ value = singleQuotedValue(source, _onError);
+ break;
+ case 'double-quoted-scalar':
+ _type = Scalar.Scalar.QUOTE_DOUBLE;
+ value = doubleQuotedValue(source, _onError);
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
+ return {
+ value: '',
+ type: null,
+ comment: '',
+ range: [offset, offset + source.length, offset + source.length]
+ };
+ }
+ const valueEnd = offset + source.length;
+ const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);
+ return {
+ value,
+ type: _type,
+ comment: re.comment,
+ range: [offset, valueEnd, re.offset]
+ };
+}
+function plainValue(source, onError) {
+ let badChar = '';
+ switch (source[0]) {
+ /* istanbul ignore next should not happen */
+ case '\t':
+ badChar = 'a tab character';
+ break;
+ case ',':
+ badChar = 'flow indicator character ,';
+ break;
+ case '%':
+ badChar = 'directive indicator character %';
+ break;
+ case '|':
+ case '>': {
+ badChar = `block scalar indicator ${source[0]}`;
+ break;
+ }
+ case '@':
+ case '`': {
+ badChar = `reserved character ${source[0]}`;
+ break;
+ }
+ }
+ if (badChar)
+ onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
+ return foldLines(source);
+}
+function singleQuotedValue(source, onError) {
+ if (source[source.length - 1] !== "'" || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
+ return foldLines(source.slice(1, -1)).replace(/''/g, "'");
+}
+function foldLines(source) {
+ var _a;
+ /**
+ * The negative lookbehind here and in the `re` RegExp is to
+ * prevent causing a polynomial search time in certain cases.
+ *
+ * The try-catch is for Safari, which doesn't support this yet:
+ * https://caniuse.com/js-regexp-lookbehind
+ */
+ let first, line;
+ try {
+ first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
+ line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
+ }
+ catch (_) {
+ first = /(.*?)[ \t]*\r?\n/sy;
+ line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
+ }
+ let match = first.exec(source);
+ if (!match)
+ return source;
+ let res = match[1];
+ let sep = ' ';
+ let pos = first.lastIndex;
+ line.lastIndex = pos;
+ while ((match = line.exec(source))) {
+ if (match[1] === '') {
+ if (sep === '\n')
+ res += sep;
+ else
+ sep = '\n';
+ }
+ else {
+ res += sep + match[1];
+ sep = ' ';
+ }
+ pos = line.lastIndex;
+ }
+ const last = /[ \t]*(.*)/sy;
+ last.lastIndex = pos;
+ match = last.exec(source);
+ return res + sep + ((_a = match === null || match === void 0 ? void 0 : match[1]) !== null && _a !== void 0 ? _a : '');
+}
+function doubleQuotedValue(source, onError) {
+ let res = '';
+ for (let i = 1; i < source.length - 1; ++i) {
+ const ch = source[i];
+ if (ch === '\r' && source[i + 1] === '\n')
+ continue;
+ if (ch === '\n') {
+ const { fold, offset } = foldNewline(source, i);
+ res += fold;
+ i = offset;
+ }
+ else if (ch === '\\') {
+ let next = source[++i];
+ const cc = escapeCodes[next];
+ if (cc)
+ res += cc;
+ else if (next === '\n') {
+ // skip escaped newlines, but still trim the following line
+ next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === '\r' && source[i + 1] === '\n') {
+ // skip escaped CRLF newlines, but still trim the following line
+ next = source[++i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ }
+ else if (next === 'x' || next === 'u' || next === 'U') {
+ const length = { x: 2, u: 4, U: 8 }[next];
+ res += parseCharCode(source, i + 1, length, onError);
+ i += length;
+ }
+ else {
+ const raw = source.substr(i - 1, 2);
+ onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ res += raw;
+ }
+ }
+ else if (ch === ' ' || ch === '\t') {
+ // trim trailing whitespace
+ const wsStart = i;
+ let next = source[i + 1];
+ while (next === ' ' || next === '\t')
+ next = source[++i + 1];
+ if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
+ res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
+ }
+ else {
+ res += ch;
+ }
+ }
+ if (source[source.length - 1] !== '"' || source.length === 1)
+ onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
+ return res;
+}
+/**
+ * Fold a single newline into a space, multiple newlines to N - 1 newlines.
+ * Presumes `source[offset] === '\n'`
+ */
+function foldNewline(source, offset) {
+ let fold = '';
+ let ch = source[offset + 1];
+ while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
+ if (ch === '\r' && source[offset + 2] !== '\n')
+ break;
+ if (ch === '\n')
+ fold += '\n';
+ offset += 1;
+ ch = source[offset + 1];
+ }
+ if (!fold)
+ fold = ' ';
+ return { fold, offset };
+}
+const escapeCodes = {
+ '0': '\0',
+ a: '\x07',
+ b: '\b',
+ e: '\x1b',
+ f: '\f',
+ n: '\n',
+ r: '\r',
+ t: '\t',
+ v: '\v',
+ N: '\u0085',
+ _: '\u00a0',
+ L: '\u2028',
+ P: '\u2029',
+ ' ': ' ',
+ '"': '"',
+ '/': '/',
+ '\\': '\\',
+ '\t': '\t'
+};
+function parseCharCode(source, offset, length, onError) {
+ const cc = source.substr(offset, length);
+ const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
+ const code = ok ? parseInt(cc, 16) : NaN;
+ if (isNaN(code)) {
+ const raw = source.substr(offset - 2, length + 2);
+ onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
+ return raw;
+ }
+ return String.fromCodePoint(code);
+}
+
+exports.resolveFlowScalar = resolveFlowScalar;
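
The folding and escape handling above is what the public parser applies to flow scalars; a small sketch (a single line break folds to a space, and double-quoted escapes such as \t and \uXXXX are decoded):

    const YAML = require('yaml');

    // Plain scalars fold a single newline into a space.
    console.log(YAML.parse('key: a\n  b').key);   // 'a b'

    // Double-quoted scalars decode backslash escapes, including \u code points.
    console.log(YAML.parse('"a\\tb \\u00e9"'));   // 'a\tb é'
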
diff --git a/node_modules/yaml/dist/compose/resolve-props.d.ts b/node_modules/yaml/dist/compose/resolve-props.d.ts
new file mode 100644
index 0000000..fba44cf
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-props.d.ts
@@ -0,0 +1,22 @@
+import type { SourceToken, Token } from '../parse/cst.js';
+import type { ComposeErrorHandler } from './composer.js';
+export interface ResolvePropsArg {
+ flow?: 'flow map' | 'flow sequence';
+ indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind';
+ next: Token | null | undefined;
+ offset: number;
+ onError: ComposeErrorHandler;
+ startOnNewline: boolean;
+}
+export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, startOnNewline }: ResolvePropsArg): {
+ comma: SourceToken | null;
+ found: SourceToken | null;
+ spaceBefore: boolean;
+ comment: string;
+ hasNewline: boolean;
+ hasNewlineAfterProp: boolean;
+ anchor: SourceToken | null;
+ tag: SourceToken | null;
+ end: number;
+ start: number;
+};
diff --git a/node_modules/yaml/dist/compose/resolve-props.js b/node_modules/yaml/dist/compose/resolve-props.js
new file mode 100644
index 0000000..5281cca
--- /dev/null
+++ b/node_modules/yaml/dist/compose/resolve-props.js
@@ -0,0 +1,136 @@
+'use strict';
+
+function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
+ let spaceBefore = false;
+ let atNewline = startOnNewline;
+ let hasSpace = startOnNewline;
+ let comment = '';
+ let commentSep = '';
+ let hasNewline = false;
+ let hasNewlineAfterProp = false;
+ let reqSpace = false;
+ let anchor = null;
+ let tag = null;
+ let comma = null;
+ let found = null;
+ let start = null;
+ for (const token of tokens) {
+ if (reqSpace) {
+ if (token.type !== 'space' &&
+ token.type !== 'newline' &&
+ token.type !== 'comma')
+ onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ reqSpace = false;
+ }
+ switch (token.type) {
+ case 'space':
+ // At the doc level, tabs at line start may be parsed
+ // as leading white space rather than indentation.
+ // In a flow collection, only the parser handles indent.
+ if (!flow &&
+ atNewline &&
+ indicator !== 'doc-start' &&
+ token.source[0] === '\t')
+ onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
+ hasSpace = true;
+ break;
+ case 'comment': {
+ if (!hasSpace)
+ onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
+ const cb = token.source.substring(1) || ' ';
+ if (!comment)
+ comment = cb;
+ else
+ comment += commentSep + cb;
+ commentSep = '';
+ atNewline = false;
+ break;
+ }
+ case 'newline':
+ if (atNewline) {
+ if (comment)
+ comment += token.source;
+ else
+ spaceBefore = true;
+ }
+ else
+ commentSep += token.source;
+ atNewline = true;
+ hasNewline = true;
+ if (anchor || tag)
+ hasNewlineAfterProp = true;
+ hasSpace = true;
+ break;
+ case 'anchor':
+ if (anchor)
+ onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
+ if (token.source.endsWith(':'))
+ onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
+ anchor = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ case 'tag': {
+ if (tag)
+ onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
+ tag = token;
+ if (start === null)
+ start = token.offset;
+ atNewline = false;
+ hasSpace = false;
+ reqSpace = true;
+ break;
+ }
+ case indicator:
+ // Could here handle preceding comments differently
+ if (anchor || tag)
+ onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
+ if (found)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow !== null && flow !== void 0 ? flow : 'collection'}`);
+ found = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ case 'comma':
+ if (flow) {
+ if (comma)
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
+ comma = token;
+ atNewline = false;
+ hasSpace = false;
+ break;
+ }
+ // else fallthrough
+ default:
+ onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
+ atNewline = false;
+ hasSpace = false;
+ }
+ }
+ const last = tokens[tokens.length - 1];
+ const end = last ? last.offset + last.source.length : offset;
+ if (reqSpace &&
+ next &&
+ next.type !== 'space' &&
+ next.type !== 'newline' &&
+ next.type !== 'comma' &&
+ (next.type !== 'scalar' || next.source !== ''))
+ onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
+ return {
+ comma,
+ found,
+ spaceBefore,
+ comment,
+ hasNewline,
+ hasNewlineAfterProp,
+ anchor,
+ tag,
+ end,
+ start: start !== null && start !== void 0 ? start : end
+ };
+}
+
+exports.resolveProps = resolveProps;
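
Among other checks, resolveProps enforces at most one anchor and one tag per node. A hedged sketch of how the MULTIPLE_ANCHORS code surfaces (the input is illustrative):

    const YAML = require('yaml');

    const doc = YAML.parseDocument('key: &a &b value\n');
    console.log(doc.errors[0].code);    // 'MULTIPLE_ANCHORS'
    console.log(doc.errors[0].message); // 'A node can have at most one anchor ...'
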
diff --git a/node_modules/yaml/dist/compose/util-contains-newline.d.ts b/node_modules/yaml/dist/compose/util-contains-newline.d.ts
new file mode 100644
index 0000000..8155be0
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-contains-newline.d.ts
@@ -0,0 +1,2 @@
+import type { Token } from '../parse/cst.js';
+export declare function containsNewline(key: Token | null | undefined): boolean | null;
diff --git a/node_modules/yaml/dist/compose/util-contains-newline.js b/node_modules/yaml/dist/compose/util-contains-newline.js
new file mode 100644
index 0000000..e7aa82d
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-contains-newline.js
@@ -0,0 +1,36 @@
+'use strict';
+
+function containsNewline(key) {
+ if (!key)
+ return null;
+ switch (key.type) {
+ case 'alias':
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ if (key.source.includes('\n'))
+ return true;
+ if (key.end)
+ for (const st of key.end)
+ if (st.type === 'newline')
+ return true;
+ return false;
+ case 'flow-collection':
+ for (const it of key.items) {
+ for (const st of it.start)
+ if (st.type === 'newline')
+ return true;
+ if (it.sep)
+ for (const st of it.sep)
+ if (st.type === 'newline')
+ return true;
+ if (containsNewline(it.key) || containsNewline(it.value))
+ return true;
+ }
+ return false;
+ default:
+ return true;
+ }
+}
+
+exports.containsNewline = containsNewline;
diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts b/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
new file mode 100644
index 0000000..90499b8
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts
@@ -0,0 +1,2 @@
+import type { Token } from '../parse/cst.js';
+export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number;
diff --git a/node_modules/yaml/dist/compose/util-empty-scalar-position.js b/node_modules/yaml/dist/compose/util-empty-scalar-position.js
new file mode 100644
index 0000000..32951b3
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-empty-scalar-position.js
@@ -0,0 +1,29 @@
+'use strict';
+
+function emptyScalarPosition(offset, before, pos) {
+ if (before) {
+ if (pos === null)
+ pos = before.length;
+ for (let i = pos - 1; i >= 0; --i) {
+ let st = before[i];
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ offset -= st.source.length;
+ continue;
+ }
+ // Technically, an empty scalar is immediately after the last non-empty
+ // node, but it's more useful to place it after any whitespace.
+ st = before[++i];
+ while ((st === null || st === void 0 ? void 0 : st.type) === 'space') {
+ offset += st.source.length;
+ st = before[++i];
+ }
+ break;
+ }
+ }
+ return offset;
+}
+
+exports.emptyScalarPosition = emptyScalarPosition;
diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts b/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
new file mode 100644
index 0000000..64ed1fc
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts
@@ -0,0 +1,3 @@
+import { Token } from '../parse/cst';
+import { ComposeErrorHandler } from './composer';
+export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void;
diff --git a/node_modules/yaml/dist/compose/util-flow-indent-check.js b/node_modules/yaml/dist/compose/util-flow-indent-check.js
new file mode 100644
index 0000000..aa7c998
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-flow-indent-check.js
@@ -0,0 +1,17 @@
+'use strict';
+
+var utilContainsNewline = require('./util-contains-newline.js');
+
+function flowIndentCheck(indent, fc, onError) {
+ if ((fc === null || fc === void 0 ? void 0 : fc.type) === 'flow-collection') {
+ const end = fc.end[0];
+ if (end.indent === indent &&
+ (end.source === ']' || end.source === '}') &&
+ utilContainsNewline.containsNewline(fc)) {
+ const msg = 'Flow end indicator should be more indented than parent';
+ onError(end, 'BAD_INDENT', msg, true);
+ }
+ }
+}
+
+exports.flowIndentCheck = flowIndentCheck;
diff --git a/node_modules/yaml/dist/compose/util-map-includes.d.ts b/node_modules/yaml/dist/compose/util-map-includes.d.ts
new file mode 100644
index 0000000..5d2c4b3
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-map-includes.d.ts
@@ -0,0 +1,4 @@
+import { ParsedNode } from '../nodes/Node';
+import { Pair } from '../nodes/Pair';
+import { ComposeContext } from './compose-node';
+export declare function mapIncludes(ctx: ComposeContext, items: Pair<ParsedNode>[], search: ParsedNode): boolean;
diff --git a/node_modules/yaml/dist/compose/util-map-includes.js b/node_modules/yaml/dist/compose/util-map-includes.js
new file mode 100644
index 0000000..ab03be1
--- /dev/null
+++ b/node_modules/yaml/dist/compose/util-map-includes.js
@@ -0,0 +1,19 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+
+function mapIncludes(ctx, items, search) {
+ const { uniqueKeys } = ctx.options;
+ if (uniqueKeys === false)
+ return false;
+ const isEqual = typeof uniqueKeys === 'function'
+ ? uniqueKeys
+ : (a, b) => a === b ||
+ (Node.isScalar(a) &&
+ Node.isScalar(b) &&
+ a.value === b.value &&
+ !(a.value === '<<' && ctx.schema.merge));
+ return items.some(pair => isEqual(pair.key, search));
+}
+
+exports.mapIncludes = mapIncludes;
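
mapIncludes backs the `uniqueKeys` option: duplicate map keys raise DUPLICATE_KEY unless the option is disabled or replaced with a custom comparator. A short sketch:

    const YAML = require('yaml');

    const src = 'a: 1\na: 2\n';
    console.log(YAML.parseDocument(src).errors[0].code);                // 'DUPLICATE_KEY'
    console.log(YAML.parseDocument(src, { uniqueKeys: false }).errors); // []
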
diff --git a/node_modules/yaml/dist/doc/Document.d.ts b/node_modules/yaml/dist/doc/Document.d.ts
new file mode 100644
index 0000000..a3a7e34
--- /dev/null
+++ b/node_modules/yaml/dist/doc/Document.d.ts
@@ -0,0 +1,139 @@
+import type { YAMLError, YAMLWarning } from '../errors.js';
+import { Alias } from '../nodes/Alias.js';
+import { Node, NODE_TYPE, ParsedNode, Range } from '../nodes/Node.js';
+import { Pair } from '../nodes/Pair.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { YAMLMap } from '../nodes/YAMLMap.js';
+import type { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options.js';
+import { Schema } from '../schema/Schema.js';
+import { Directives } from './directives.js';
+export declare type Replacer = any[] | ((key: any, value: any) => unknown);
+export declare namespace Document {
+ interface Parsed<T extends ParsedNode = ParsedNode> extends Document<T> {
+ directives: Directives;
+ range: Range;
+ }
+}
+export declare class Document<T = unknown> {
+ readonly [NODE_TYPE]: symbol;
+ /** A comment before this Document */
+ commentBefore: string | null;
+ /** A comment immediately after this Document */
+ comment: string | null;
+ /** The document contents. */
+ contents: T | null;
+ directives?: Directives;
+ /** Errors encountered during parsing. */
+ errors: YAMLError[];
+ options: Required<Omit<ParseOptions & DocumentOptions, 'lineCounter' | 'directives' | 'version'>>;
+ /**
+ * The `[start, value-end, node-end]` character offsets for the part of the
+ * source parsed into this document (undefined if not parsed). The `value-end`
+ * and `node-end` positions are themselves not included in their respective
+ * ranges.
+ */
+ range?: Range;
+ /** The schema used with the document. Use `setSchema()` to change. */
+ schema: Schema;
+ /** Warnings encountered during parsing. */
+ warnings: YAMLWarning[];
+ /**
+ * @param value - The initial value for the document, which will be wrapped
+ * in a Node container.
+ */
+ constructor(value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
+ constructor(value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions);
+ /**
+ * Create a deep copy of this Document and its contents.
+ *
+ * Custom Node values that inherit from `Object` still refer to their original instances.
+ */
+ clone(): Document<T>;
+ /** Adds a value to the document. */
+ add(value: any): void;
+ /** Adds a value to the document. */
+ addIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
+ *
+ * If `node` already has an anchor, `name` is ignored.
+ * Otherwise, the `node.anchor` value will be set to `name`,
+ * or if an anchor with that name is already present in the document,
+ * `name` will be used as a prefix for a new unique anchor.
+ * If `name` is undefined, the generated anchor will use 'a' as a prefix.
+ */
+ createAlias(node: Scalar | YAMLMap | YAMLSeq, name?: string): Alias;
+ /**
+ * Convert any value into a `Node` using the current schema, recursively
+ * turning objects into collections.
+ */
+ createNode(value: unknown, options?: CreateNodeOptions): Node;
+ createNode(value: unknown, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions): Node;
+ /**
+ * Convert a key and a value into a `Pair` using the current schema,
+ * recursively wrapping all values as `Scalar` or `Collection` nodes.
+ */
+ createPair<K extends Node = Node, V extends Node = Node>(key: unknown, value: unknown, options?: CreateNodeOptions): Pair<K, V>;
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key: any): boolean;
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path: Iterable<unknown>): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Returns item at `path`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path: Iterable<unknown>, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the document includes a value with the key `key`.
+ */
+ has(key: unknown): boolean;
+ /**
+ * Checks if the document includes a value at `path`.
+ */
+ hasIn(path: Iterable<unknown>): boolean;
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ set(key: any, value: unknown): void;
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Change the YAML version and schema used by the document.
+ * A `null` version disables support for directives, explicit tags, anchors, and aliases.
+ * It also requires the `schema` option to be given as a `Schema` instance value.
+ *
+ * Overrides all previously set schema options.
+ */
+ setSchema(version: '1.1' | '1.2' | 'next' | null, options?: SchemaOptions): void;
+ /** A plain JavaScript representation of the document `contents`. */
+ toJS(opt?: ToJSOptions & {
+ [ignored: string]: unknown;
+ }): any;
+ /**
+ * A JSON representation of the document `contents`.
+ *
+ * @param jsonArg Used by `JSON.stringify` to indicate the array index or
+ * property name.
+ */
+ toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any;
+ /** A YAML representation of the document. */
+ toString(options?: ToStringOptions): string;
+}
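
A minimal usage sketch of the accessor methods declared above, assuming the package is loaded as `yaml`:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('a:\n  b: 1\n');
    console.log(doc.getIn(['a', 'b']));   // 1
    doc.setIn(['a', 'c'], 2);
    console.log(doc.toString());          // 'a:\n  b: 1\n  c: 2\n'
    console.log(doc.toJS());              // { a: { b: 1, c: 2 } }
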
diff --git a/node_modules/yaml/dist/doc/Document.js b/node_modules/yaml/dist/doc/Document.js
new file mode 100644
index 0000000..d38d080
--- /dev/null
+++ b/node_modules/yaml/dist/doc/Document.js
@@ -0,0 +1,334 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var Collection = require('../nodes/Collection.js');
+var Node = require('../nodes/Node.js');
+var Pair = require('../nodes/Pair.js');
+var toJS = require('../nodes/toJS.js');
+var Schema = require('../schema/Schema.js');
+var stringify = require('../stringify/stringify.js');
+var stringifyDocument = require('../stringify/stringifyDocument.js');
+var anchors = require('./anchors.js');
+var applyReviver = require('./applyReviver.js');
+var createNode = require('./createNode.js');
+var directives = require('./directives.js');
+
+class Document {
+ constructor(value, replacer, options) {
+ /** A comment before this Document */
+ this.commentBefore = null;
+ /** A comment immediately after this Document */
+ this.comment = null;
+ /** Errors encountered during parsing. */
+ this.errors = [];
+ /** Warnings encountered during parsing. */
+ this.warnings = [];
+ Object.defineProperty(this, Node.NODE_TYPE, { value: Node.DOC });
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const opt = Object.assign({
+ intAsBigInt: false,
+ keepSourceTokens: false,
+ logLevel: 'warn',
+ prettyErrors: true,
+ strict: true,
+ uniqueKeys: true,
+ version: '1.2'
+ }, options);
+ this.options = opt;
+ let { version } = opt;
+ if (options === null || options === void 0 ? void 0 : options.directives) {
+ this.directives = options.directives.atDocument();
+ if (this.directives.yaml.explicit)
+ version = this.directives.yaml.version;
+ }
+ else
+ this.directives = new directives.Directives({ version });
+ this.setSchema(version, options);
+ if (value === undefined)
+ this.contents = null;
+ else {
+ this.contents = this.createNode(value, _replacer, options);
+ }
+ }
+ /**
+ * Create a deep copy of this Document and its contents.
+ *
+ * Custom Node values that inherit from `Object` still refer to their original instances.
+ */
+ clone() {
+ const copy = Object.create(Document.prototype, {
+ [Node.NODE_TYPE]: { value: Node.DOC }
+ });
+ copy.commentBefore = this.commentBefore;
+ copy.comment = this.comment;
+ copy.errors = this.errors.slice();
+ copy.warnings = this.warnings.slice();
+ copy.options = Object.assign({}, this.options);
+ if (this.directives)
+ copy.directives = this.directives.clone();
+ copy.schema = this.schema.clone();
+ copy.contents = Node.isNode(this.contents)
+ ? this.contents.clone(copy.schema)
+ : this.contents;
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /** Adds a value to the document. */
+ add(value) {
+ if (assertCollection(this.contents))
+ this.contents.add(value);
+ }
+ /** Adds a value to the document. */
+ addIn(path, value) {
+ if (assertCollection(this.contents))
+ this.contents.addIn(path, value);
+ }
+ /**
+ * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
+ *
+ * If `node` already has an anchor, `name` is ignored.
+ * Otherwise, the `node.anchor` value will be set to `name`,
+ * or if an anchor with that name is already present in the document,
+ * `name` will be used as a prefix for a new unique anchor.
+ * If `name` is undefined, the generated anchor will use 'a' as a prefix.
+ */
+ createAlias(node, name) {
+ if (!node.anchor) {
+ const prev = anchors.anchorNames(this);
+ node.anchor =
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;
+ }
+ return new Alias.Alias(node.anchor);
+ }
+ createNode(value, replacer, options) {
+ let _replacer = undefined;
+ if (typeof replacer === 'function') {
+ value = replacer.call({ '': value }, '', value);
+ _replacer = replacer;
+ }
+ else if (Array.isArray(replacer)) {
+ const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
+ const asStr = replacer.filter(keyToStr).map(String);
+ if (asStr.length > 0)
+ replacer = replacer.concat(asStr);
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ replacer = undefined;
+ }
+ const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options !== null && options !== void 0 ? options : {};
+ const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this,
+ // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
+ anchorPrefix || 'a');
+ const ctx = {
+ aliasDuplicateObjects: aliasDuplicateObjects !== null && aliasDuplicateObjects !== void 0 ? aliasDuplicateObjects : true,
+ keepUndefined: keepUndefined !== null && keepUndefined !== void 0 ? keepUndefined : false,
+ onAnchor,
+ onTagObj,
+ replacer: _replacer,
+ schema: this.schema,
+ sourceObjects
+ };
+ const node = createNode.createNode(value, tag, ctx);
+ if (flow && Node.isCollection(node))
+ node.flow = true;
+ setAnchors();
+ return node;
+ }
+ /**
+ * Convert a key and a value into a `Pair` using the current schema,
+ * recursively wrapping all values as `Scalar` or `Collection` nodes.
+ */
+ createPair(key, value, options = {}) {
+ const k = this.createNode(key, null, options);
+ const v = this.createNode(value, null, options);
+ return new Pair.Pair(k, v);
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ return assertCollection(this.contents) ? this.contents.delete(key) : false;
+ }
+ /**
+ * Removes a value from the document.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ if (Collection.isEmptyPath(path)) {
+ if (this.contents == null)
+ return false;
+ this.contents = null;
+ return true;
+ }
+ return assertCollection(this.contents)
+ ? this.contents.deleteIn(path)
+ : false;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ get(key, keepScalar) {
+ return Node.isCollection(this.contents)
+ ? this.contents.get(key, keepScalar)
+ : undefined;
+ }
+ /**
+ * Returns item at `path`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ if (Collection.isEmptyPath(path))
+ return !keepScalar && Node.isScalar(this.contents)
+ ? this.contents.value
+ : this.contents;
+ return Node.isCollection(this.contents)
+ ? this.contents.getIn(path, keepScalar)
+ : undefined;
+ }
+ /**
+ * Checks if the document includes a value with the key `key`.
+ */
+ has(key) {
+ return Node.isCollection(this.contents) ? this.contents.has(key) : false;
+ }
+ /**
+ * Checks if the document includes a value at `path`.
+ */
+ hasIn(path) {
+ if (Collection.isEmptyPath(path))
+ return this.contents !== undefined;
+ return Node.isCollection(this.contents) ? this.contents.hasIn(path) : false;
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ set(key, value) {
+ if (this.contents == null) {
+ this.contents = Collection.collectionFromPath(this.schema, [key], value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.set(key, value);
+ }
+ }
+ /**
+ * Sets a value in this document. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ if (Collection.isEmptyPath(path))
+ this.contents = value;
+ else if (this.contents == null) {
+ this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);
+ }
+ else if (assertCollection(this.contents)) {
+ this.contents.setIn(path, value);
+ }
+ }
+ /**
+ * Change the YAML version and schema used by the document.
+ * A `null` version disables support for directives, explicit tags, anchors, and aliases.
+ * It also requires the `schema` option to be given as a `Schema` instance value.
+ *
+ * Overrides all previously set schema options.
+ */
+ setSchema(version, options = {}) {
+ if (typeof version === 'number')
+ version = String(version);
+ let opt;
+ switch (version) {
+ case '1.1':
+ if (this.directives)
+ this.directives.yaml.version = '1.1';
+ else
+ this.directives = new directives.Directives({ version: '1.1' });
+ opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
+ break;
+ case '1.2':
+ case 'next':
+ if (this.directives)
+ this.directives.yaml.version = version;
+ else
+ this.directives = new directives.Directives({ version });
+ opt = { merge: false, resolveKnownTags: true, schema: 'core' };
+ break;
+ case null:
+ if (this.directives)
+ delete this.directives;
+ opt = null;
+ break;
+ default: {
+ const sv = JSON.stringify(version);
+ throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
+ }
+ }
+ // Not using `instanceof Schema` to allow for duck typing
+ if (options.schema instanceof Object)
+ this.schema = options.schema;
+ else if (opt)
+ this.schema = new Schema.Schema(Object.assign(opt, options));
+ else
+ throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
+ }
+ // json & jsonArg are only used from toJSON()
+ toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
+ const ctx = {
+ anchors: new Map(),
+ doc: this,
+ keep: !json,
+ mapAsMap: mapAsMap === true,
+ mapKeyWarned: false,
+ maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
+ stringify: stringify.stringify
+ };
+ const res = toJS.toJS(this.contents, jsonArg !== null && jsonArg !== void 0 ? jsonArg : '', ctx);
+ if (typeof onAnchor === 'function')
+ for (const { count, res } of ctx.anchors.values())
+ onAnchor(res, count);
+ return typeof reviver === 'function'
+ ? applyReviver.applyReviver(reviver, { '': res }, '', res)
+ : res;
+ }
+ /**
+ * A JSON representation of the document `contents`.
+ *
+ * @param jsonArg Used by `JSON.stringify` to indicate the array index or
+ * property name.
+ */
+ toJSON(jsonArg, onAnchor) {
+ return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
+ }
+ /** A YAML representation of the document. */
+ toString(options = {}) {
+ if (this.errors.length > 0)
+ throw new Error('Document with errors cannot be stringified');
+ if ('indent' in options &&
+ (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
+ const s = JSON.stringify(options.indent);
+ throw new Error(`"indent" option must be a positive integer, not ${s}`);
+ }
+ return stringifyDocument.stringifyDocument(this, options);
+ }
+}
+function assertCollection(contents) {
+ if (Node.isCollection(contents))
+ return true;
+ throw new Error('Expected a YAML collection as document contents');
+}
+
+exports.Document = Document;
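
Documents can also be built directly and re-targeted at another YAML version through setSchema, as implemented above; a small sketch with an illustrative value:

    const YAML = require('yaml');

    const doc = new YAML.Document({ answer: 42 });
    console.log(doc.toString());              // 'answer: 42\n'

    // Switch to the YAML 1.1 schema (enables merge keys and the yaml-1.1 tags).
    doc.setSchema('1.1');
    console.log(doc.directives.yaml.version); // '1.1'
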
diff --git a/node_modules/yaml/dist/doc/anchors.d.ts b/node_modules/yaml/dist/doc/anchors.d.ts
new file mode 100644
index 0000000..d321574
--- /dev/null
+++ b/node_modules/yaml/dist/doc/anchors.d.ts
@@ -0,0 +1,24 @@
+import { Node } from '../nodes/Node.js';
+import type { Document } from './Document.js';
+/**
+ * Verify that the input string is a valid anchor.
+ *
+ * Will throw on errors.
+ */
+export declare function anchorIsValid(anchor: string): true;
+export declare function anchorNames(root: Document | Node): Set<string>;
+/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
+export declare function findNewAnchor(prefix: string, exclude: Set<string>): string;
+export declare function createNodeAnchors(doc: Document, prefix: string): {
+ onAnchor: (source: unknown) => string;
+ /**
+ * With circular references, the source node is only resolved after all
+ * of its child nodes are. This is why anchors are set only after all of
+ * the nodes have been created.
+ */
+ setAnchors: () => void;
+ sourceObjects: Map<unknown, {
+ anchor: string | null;
+ node: Node | null;
+ }>;
+};
diff --git a/node_modules/yaml/dist/doc/anchors.js b/node_modules/yaml/dist/doc/anchors.js
new file mode 100644
index 0000000..5c51bdb
--- /dev/null
+++ b/node_modules/yaml/dist/doc/anchors.js
@@ -0,0 +1,77 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var visit = require('../visit.js');
+
+/**
+ * Verify that the input string is a valid anchor.
+ *
+ * Will throw on errors.
+ */
+function anchorIsValid(anchor) {
+ if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
+ const sa = JSON.stringify(anchor);
+ const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
+ throw new Error(msg);
+ }
+ return true;
+}
+function anchorNames(root) {
+ const anchors = new Set();
+ visit.visit(root, {
+ Value(_key, node) {
+ if (node.anchor)
+ anchors.add(node.anchor);
+ }
+ });
+ return anchors;
+}
+/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
+function findNewAnchor(prefix, exclude) {
+ for (let i = 1; true; ++i) {
+ const name = `${prefix}${i}`;
+ if (!exclude.has(name))
+ return name;
+ }
+}
+function createNodeAnchors(doc, prefix) {
+ const aliasObjects = [];
+ const sourceObjects = new Map();
+ let prevAnchors = null;
+ return {
+ onAnchor: (source) => {
+ aliasObjects.push(source);
+ if (!prevAnchors)
+ prevAnchors = anchorNames(doc);
+ const anchor = findNewAnchor(prefix, prevAnchors);
+ prevAnchors.add(anchor);
+ return anchor;
+ },
+ /**
+ * With circular references, the source node is only resolved after all
+ * of its child nodes are. This is why anchors are set only after all of
+ * the nodes have been created.
+ */
+ setAnchors: () => {
+ for (const source of aliasObjects) {
+ const ref = sourceObjects.get(source);
+ if (typeof ref === 'object' &&
+ ref.anchor &&
+ (Node.isScalar(ref.node) || Node.isCollection(ref.node))) {
+ ref.node.anchor = ref.anchor;
+ }
+ else {
+ const error = new Error('Failed to resolve repeated object (this should not happen)');
+ error.source = source;
+ throw error;
+ }
+ }
+ },
+ sourceObjects
+ };
+}
+
+exports.anchorIsValid = anchorIsValid;
+exports.anchorNames = anchorNames;
+exports.createNodeAnchors = createNodeAnchors;
+exports.findNewAnchor = findNewAnchor;
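
createNodeAnchors and findNewAnchor drive automatic anchor generation as well as Document.createAlias; a sketch of the latter (the key and anchor names are illustrative):

    const YAML = require('yaml');

    const doc = YAML.parseDocument('defaults:\n  retries: 3\n');
    const alias = doc.createAlias(doc.get('defaults', true), 'base');
    doc.set('job', alias);
    console.log(doc.toString());
    // defaults: &base
    //   retries: 3
    // job: *base
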
diff --git a/node_modules/yaml/dist/doc/applyReviver.d.ts b/node_modules/yaml/dist/doc/applyReviver.d.ts
new file mode 100644
index 0000000..ac28f2c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/applyReviver.d.ts
@@ -0,0 +1,9 @@
+export declare type Reviver = (key: unknown, value: unknown) => unknown;
+/**
+ * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
+ * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
+ * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
+ *
+ * Includes extensions for handling Map and Set objects.
+ */
+export declare function applyReviver(reviver: Reviver, obj: unknown, key: unknown, val: any): unknown;
diff --git a/node_modules/yaml/dist/doc/applyReviver.js b/node_modules/yaml/dist/doc/applyReviver.js
new file mode 100644
index 0000000..8734579
--- /dev/null
+++ b/node_modules/yaml/dist/doc/applyReviver.js
@@ -0,0 +1,56 @@
+'use strict';
+
+/**
+ * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
+ * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
+ * 2021 edition: https://tc39.es/ecma262/#sec-json.parse
+ *
+ * Includes extensions for handling Map and Set objects.
+ */
+function applyReviver(reviver, obj, key, val) {
+ if (val && typeof val === 'object') {
+ if (Array.isArray(val)) {
+ for (let i = 0, len = val.length; i < len; ++i) {
+ const v0 = val[i];
+ const v1 = applyReviver(reviver, val, String(i), v0);
+ if (v1 === undefined)
+ delete val[i];
+ else if (v1 !== v0)
+ val[i] = v1;
+ }
+ }
+ else if (val instanceof Map) {
+ for (const k of Array.from(val.keys())) {
+ const v0 = val.get(k);
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ val.delete(k);
+ else if (v1 !== v0)
+ val.set(k, v1);
+ }
+ }
+ else if (val instanceof Set) {
+ for (const v0 of Array.from(val)) {
+ const v1 = applyReviver(reviver, val, v0, v0);
+ if (v1 === undefined)
+ val.delete(v0);
+ else if (v1 !== v0) {
+ val.delete(v0);
+ val.add(v1);
+ }
+ }
+ }
+ else {
+ for (const [k, v0] of Object.entries(val)) {
+ const v1 = applyReviver(reviver, val, k, v0);
+ if (v1 === undefined)
+ delete val[k];
+ else if (v1 !== v0)
+ val[k] = v1;
+ }
+ }
+ }
+ return reviver.call(obj, key, val);
+}
+
+exports.applyReviver = applyReviver;
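
This reviver is applied by parse() and toJS() when a reviver function is passed, mirroring JSON.parse; a brief sketch:

    const YAML = require('yaml');

    const data = YAML.parse('a: 1\nb: 2\n', (key, value) =>
      typeof value === 'number' ? value * 10 : value);
    console.log(data); // { a: 10, b: 20 }
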
diff --git a/node_modules/yaml/dist/doc/createNode.d.ts b/node_modules/yaml/dist/doc/createNode.d.ts
new file mode 100644
index 0000000..cfbd35c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/createNode.d.ts
@@ -0,0 +1,17 @@
+import { Node } from '../nodes/Node.js';
+import type { Schema } from '../schema/Schema.js';
+import type { CollectionTag, ScalarTag } from '../schema/types.js';
+import type { Replacer } from './Document.js';
+export interface CreateNodeContext {
+ aliasDuplicateObjects: boolean;
+ keepUndefined: boolean;
+ onAnchor: (source: unknown) => string;
+ onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
+ sourceObjects: Map<unknown, {
+ anchor: string | null;
+ node: Node | null;
+ }>;
+ replacer?: Replacer;
+ schema: Schema;
+}
+export declare function createNode(value: unknown, tagName: string | undefined, ctx: CreateNodeContext): Node;
diff --git a/node_modules/yaml/dist/doc/createNode.js b/node_modules/yaml/dist/doc/createNode.js
new file mode 100644
index 0000000..801a611
--- /dev/null
+++ b/node_modules/yaml/dist/doc/createNode.js
@@ -0,0 +1,89 @@
+'use strict';
+
+var Alias = require('../nodes/Alias.js');
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+
+const defaultTagPrefix = 'tag:yaml.org,2002:';
+function findTagObject(value, tagName, tags) {
+ var _a;
+ if (tagName) {
+ const match = tags.filter(t => t.tag === tagName);
+ const tagObj = (_a = match.find(t => !t.format)) !== null && _a !== void 0 ? _a : match[0];
+ if (!tagObj)
+ throw new Error(`Tag ${tagName} not found`);
+ return tagObj;
+ }
+ return tags.find(t => { var _a; return ((_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, value)) && !t.format; });
+}
+function createNode(value, tagName, ctx) {
+ var _a, _b;
+ if (Node.isDocument(value))
+ value = value.contents;
+ if (Node.isNode(value))
+ return value;
+ if (Node.isPair(value)) {
+ const map = (_b = (_a = ctx.schema[Node.MAP]).createNode) === null || _b === void 0 ? void 0 : _b.call(_a, ctx.schema, null, ctx);
+ map.items.push(value);
+ return map;
+ }
+ if (value instanceof String ||
+ value instanceof Number ||
+ value instanceof Boolean ||
+ (typeof BigInt === 'function' && value instanceof BigInt) // not supported everywhere
+ ) {
+ // https://tc39.es/ecma262/#sec-serializejsonproperty
+ value = value.valueOf();
+ }
+ const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
+ // Detect duplicate references to the same object & use Alias nodes for all
+ // after first. The `ref` wrapper allows for circular references to resolve.
+ let ref = undefined;
+ if (aliasDuplicateObjects && value && typeof value === 'object') {
+ ref = sourceObjects.get(value);
+ if (ref) {
+ if (!ref.anchor)
+ ref.anchor = onAnchor(value);
+ return new Alias.Alias(ref.anchor);
+ }
+ else {
+ ref = { anchor: null, node: null };
+ sourceObjects.set(value, ref);
+ }
+ }
+ if (tagName === null || tagName === void 0 ? void 0 : tagName.startsWith('!!'))
+ tagName = defaultTagPrefix + tagName.slice(2);
+ let tagObj = findTagObject(value, tagName, schema.tags);
+ if (!tagObj) {
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ value = value.toJSON();
+ }
+ if (!value || typeof value !== 'object') {
+ const node = new Scalar.Scalar(value);
+ if (ref)
+ ref.node = node;
+ return node;
+ }
+ tagObj =
+ value instanceof Map
+ ? schema[Node.MAP]
+ : Symbol.iterator in Object(value)
+ ? schema[Node.SEQ]
+ : schema[Node.MAP];
+ }
+ if (onTagObj) {
+ onTagObj(tagObj);
+ delete ctx.onTagObj;
+ }
+ const node = (tagObj === null || tagObj === void 0 ? void 0 : tagObj.createNode)
+ ? tagObj.createNode(ctx.schema, value, ctx)
+ : new Scalar.Scalar(value);
+ if (tagName)
+ node.tag = tagName;
+ if (ref)
+ ref.node = node;
+ return node;
+}
+
+exports.createNode = createNode;
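
createNode is also where repeated references to the same object become anchor/alias pairs, controlled by aliasDuplicateObjects (default true); a sketch:

    const YAML = require('yaml');

    const shared = { retries: 3 };
    console.log(YAML.stringify({ a: shared, b: shared }));
    // a: &a1
    //   retries: 3
    // b: *a1

    console.log(YAML.stringify({ a: shared, b: shared }, { aliasDuplicateObjects: false }));
    // a:
    //   retries: 3
    // b:
    //   retries: 3
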
diff --git a/node_modules/yaml/dist/doc/directives.d.ts b/node_modules/yaml/dist/doc/directives.d.ts
new file mode 100644
index 0000000..ff8a2cb
--- /dev/null
+++ b/node_modules/yaml/dist/doc/directives.d.ts
@@ -0,0 +1,49 @@
+import type { Document } from './Document.js';
+export declare class Directives {
+ static defaultYaml: Directives['yaml'];
+ static defaultTags: Directives['tags'];
+ yaml: {
+ version: '1.1' | '1.2' | 'next';
+ explicit?: boolean;
+ };
+ tags: Record<string, string>;
+ /**
+ * The directives-end/doc-start marker `---`. If `null`, a marker may still be
+ * included in the document's stringified representation.
+ */
+ docStart: true | null;
+ /** The doc-end marker `...`. */
+ docEnd: boolean;
+ /**
+ * Used when parsing YAML 1.1, where:
+ * > If the document specifies no directives, it is parsed using the same
+ * > settings as the previous document. If the document does specify any
+ * > directives, all directives of previous documents, if any, are ignored.
+ */
+ private atNextDocument?;
+ constructor(yaml?: Directives['yaml'], tags?: Directives['tags']);
+ clone(): Directives;
+ /**
+ * During parsing, get a Directives instance for the current document and
+ * update the stream state according to the current version's spec.
+ */
+ atDocument(): Directives;
+ /**
+ * @param onError - May be called even if the action was successful
+ * @returns `true` on success
+ */
+ add(line: string, onError: (offset: number, message: string, warning?: boolean) => void): boolean;
+ /**
+ * Resolves a tag, matching handles to those defined in %TAG directives.
+ *
+ * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
+ * `'!local'` tag, or `null` if unresolvable.
+ */
+ tagName(source: string, onError: (message: string) => void): string | null;
+ /**
+ * Given a fully resolved tag, returns its printable string form,
+ * taking into account current tag prefixes and defaults.
+ */
+ tagString(tag: string): string;
+ toString(doc?: Document): string;
+}
diff --git a/node_modules/yaml/dist/doc/directives.js b/node_modules/yaml/dist/doc/directives.js
new file mode 100644
index 0000000..0fa911c
--- /dev/null
+++ b/node_modules/yaml/dist/doc/directives.js
@@ -0,0 +1,171 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var visit = require('../visit.js');
+
+const escapeChars = {
+ '!': '%21',
+ ',': '%2C',
+ '[': '%5B',
+ ']': '%5D',
+ '{': '%7B',
+ '}': '%7D'
+};
+const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
+class Directives {
+ constructor(yaml, tags) {
+ /**
+ * The directives-end/doc-start marker `---`. If `null`, a marker may still be
+ * included in the document's stringified representation.
+ */
+ this.docStart = null;
+ /** The doc-end marker `...`. */
+ this.docEnd = false;
+ this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
+ this.tags = Object.assign({}, Directives.defaultTags, tags);
+ }
+ clone() {
+ const copy = new Directives(this.yaml, this.tags);
+ copy.docStart = this.docStart;
+ return copy;
+ }
+ /**
+ * During parsing, get a Directives instance for the current document and
+ * update the stream state according to the current version's spec.
+ */
+ atDocument() {
+ const res = new Directives(this.yaml, this.tags);
+ switch (this.yaml.version) {
+ case '1.1':
+ this.atNextDocument = true;
+ break;
+ case '1.2':
+ this.atNextDocument = false;
+ this.yaml = {
+ explicit: Directives.defaultYaml.explicit,
+ version: '1.2'
+ };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ break;
+ }
+ return res;
+ }
+ /**
+ * @param onError - May be called even if the action was successful
+ * @returns `true` on success
+ */
+ add(line, onError) {
+ if (this.atNextDocument) {
+ this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
+ this.tags = Object.assign({}, Directives.defaultTags);
+ this.atNextDocument = false;
+ }
+ const parts = line.trim().split(/[ \t]+/);
+ const name = parts.shift();
+ switch (name) {
+ case '%TAG': {
+ if (parts.length !== 2) {
+ onError(0, '%TAG directive should contain exactly two parts');
+ if (parts.length < 2)
+ return false;
+ }
+ const [handle, prefix] = parts;
+ this.tags[handle] = prefix;
+ return true;
+ }
+ case '%YAML': {
+ this.yaml.explicit = true;
+ if (parts.length !== 1) {
+ onError(0, '%YAML directive should contain exactly one part');
+ return false;
+ }
+ const [version] = parts;
+ if (version === '1.1' || version === '1.2') {
+ this.yaml.version = version;
+ return true;
+ }
+ else {
+ const isValid = /^\d+\.\d+$/.test(version);
+ onError(6, `Unsupported YAML version ${version}`, isValid);
+ return false;
+ }
+ }
+ default:
+ onError(0, `Unknown directive ${name}`, true);
+ return false;
+ }
+ }
+ /**
+ * Resolves a tag, matching handles to those defined in %TAG directives.
+ *
+ * @returns Resolved tag, which may also be the non-specific tag `'!'` or a
+ * `'!local'` tag, or `null` if unresolvable.
+ */
+ tagName(source, onError) {
+ if (source === '!')
+ return '!'; // non-specific tag
+ if (source[0] !== '!') {
+ onError(`Not a valid tag: ${source}`);
+ return null;
+ }
+ if (source[1] === '<') {
+ const verbatim = source.slice(2, -1);
+ if (verbatim === '!' || verbatim === '!!') {
+ onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
+ return null;
+ }
+ if (source[source.length - 1] !== '>')
+ onError('Verbatim tags must end with a >');
+ return verbatim;
+ }
+ const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
+ if (!suffix)
+ onError(`The ${source} tag has no suffix`);
+ const prefix = this.tags[handle];
+ if (prefix)
+ return prefix + decodeURIComponent(suffix);
+ if (handle === '!')
+ return source; // local tag
+ onError(`Could not resolve tag: ${source}`);
+ return null;
+ }
+ /**
+ * Given a fully resolved tag, returns its printable string form,
+ * taking into account current tag prefixes and defaults.
+ */
+ tagString(tag) {
+ for (const [handle, prefix] of Object.entries(this.tags)) {
+ if (tag.startsWith(prefix))
+ return handle + escapeTagName(tag.substring(prefix.length));
+ }
+ return tag[0] === '!' ? tag : `!<${tag}>`;
+ }
+ toString(doc) {
+ const lines = this.yaml.explicit
+ ? [`%YAML ${this.yaml.version || '1.2'}`]
+ : [];
+ const tagEntries = Object.entries(this.tags);
+ let tagNames;
+ if (doc && tagEntries.length > 0 && Node.isNode(doc.contents)) {
+ const tags = {};
+ visit.visit(doc.contents, (_key, node) => {
+ if (Node.isNode(node) && node.tag)
+ tags[node.tag] = true;
+ });
+ tagNames = Object.keys(tags);
+ }
+ else
+ tagNames = [];
+ for (const [handle, prefix] of tagEntries) {
+ if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
+ continue;
+ if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
+ lines.push(`%TAG ${handle} ${prefix}`);
+ }
+ return lines.join('\n');
+ }
+}
+Directives.defaultYaml = { explicit: false, version: '1.2' };
+Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
+
+exports.Directives = Directives;
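
The directive handling above is reachable from any parsed document; a sketch with an illustrative %TAG handle and prefix:

    const YAML = require('yaml');

    const src = '%YAML 1.1\n%TAG !e! tag:example.com,2024:\n---\nkey: value\n';
    const doc = YAML.parseDocument(src);
    console.log(doc.directives.yaml.version); // '1.1'
    console.log(doc.directives.tags['!e!']);  // 'tag:example.com,2024:'
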
diff --git a/node_modules/yaml/dist/errors.d.ts b/node_modules/yaml/dist/errors.d.ts
new file mode 100644
index 0000000..1ea1797
--- /dev/null
+++ b/node_modules/yaml/dist/errors.d.ts
@@ -0,0 +1,21 @@
+import type { LineCounter } from './parse/line-counter';
+export declare type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN';
+export declare type LinePos = {
+ line: number;
+ col: number;
+};
+export declare class YAMLError extends Error {
+ name: 'YAMLParseError' | 'YAMLWarning';
+ code: ErrorCode;
+ message: string;
+ pos: [number, number];
+ linePos?: [LinePos] | [LinePos, LinePos];
+ constructor(name: YAMLError['name'], pos: [number, number], code: ErrorCode, message: string);
+}
+export declare class YAMLParseError extends YAMLError {
+ constructor(pos: [number, number], code: ErrorCode, message: string);
+}
+export declare class YAMLWarning extends YAMLError {
+ constructor(pos: [number, number], code: ErrorCode, message: string);
+}
+export declare const prettifyError: (src: string, lc: LineCounter) => (error: YAMLError) => void;
diff --git a/node_modules/yaml/dist/errors.js b/node_modules/yaml/dist/errors.js
new file mode 100644
index 0000000..39f49fa
--- /dev/null
+++ b/node_modules/yaml/dist/errors.js
@@ -0,0 +1,62 @@
+'use strict';
+
+class YAMLError extends Error {
+ constructor(name, pos, code, message) {
+ super();
+ this.name = name;
+ this.code = code;
+ this.message = message;
+ this.pos = pos;
+ }
+}
+class YAMLParseError extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLParseError', pos, code, message);
+ }
+}
+class YAMLWarning extends YAMLError {
+ constructor(pos, code, message) {
+ super('YAMLWarning', pos, code, message);
+ }
+}
+const prettifyError = (src, lc) => (error) => {
+ if (error.pos[0] === -1)
+ return;
+ error.linePos = error.pos.map(pos => lc.linePos(pos));
+ const { line, col } = error.linePos[0];
+ error.message += ` at line ${line}, column ${col}`;
+ let ci = col - 1;
+ let lineStr = src
+ .substring(lc.lineStarts[line - 1], lc.lineStarts[line])
+ .replace(/[\n\r]+$/, '');
+ // Trim to max 80 chars, keeping col position near the middle
+ if (ci >= 60 && lineStr.length > 80) {
+ const trimStart = Math.min(ci - 39, lineStr.length - 79);
+ lineStr = '…' + lineStr.substring(trimStart);
+ ci -= trimStart - 1;
+ }
+ if (lineStr.length > 80)
+ lineStr = lineStr.substring(0, 79) + '…';
+ // Include previous line in context if pointing at line start
+ if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
+ // Regexp won't match if start is trimmed
+ let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
+ if (prev.length > 80)
+ prev = prev.substring(0, 79) + '…\n';
+ lineStr = prev + lineStr;
+ }
+ if (/[^ ]/.test(lineStr)) {
+ let count = 1;
+ const end = error.linePos[1];
+ if (end && end.line === line && end.col > col) {
+ count = Math.min(end.col - col, 80 - ci);
+ }
+ const pointer = ' '.repeat(ci) + '^'.repeat(count);
+ error.message += `:\n\n${lineStr}\n${pointer}\n`;
+ }
+};
+
+exports.YAMLError = YAMLError;
+exports.YAMLParseError = YAMLParseError;
+exports.YAMLWarning = YAMLWarning;
+exports.prettifyError = prettifyError;
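
With prettyErrors (the default), parse errors carry the error code, offsets, and the line/column info computed by prettifyError above; a hedged sketch:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('key: "unclosed\n');
    const err = doc.errors[0];
    console.log(err instanceof YAML.YAMLParseError); // true
    console.log(err.code);    // e.g. 'MISSING_CHAR' (missing closing quote)
    console.log(err.linePos); // [ { line: 1, col: ... }, ... ]
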
diff --git a/node_modules/yaml/dist/index.d.ts b/node_modules/yaml/dist/index.d.ts
new file mode 100644
index 0000000..de561ca
--- /dev/null
+++ b/node_modules/yaml/dist/index.d.ts
@@ -0,0 +1,19 @@
+export { Composer } from './compose/composer.js';
+export { Document } from './doc/Document.js';
+export { Schema } from './schema/Schema.js';
+export { ErrorCode, YAMLError, YAMLParseError, YAMLWarning } from './errors.js';
+export { Alias } from './nodes/Alias.js';
+export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq, Node, ParsedNode, Range } from './nodes/Node.js';
+export { Pair } from './nodes/Pair.js';
+export { Scalar } from './nodes/Scalar.js';
+export { YAMLMap } from './nodes/YAMLMap.js';
+export { YAMLSeq } from './nodes/YAMLSeq.js';
+export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js';
+export * as CST from './parse/cst.js';
+export { Lexer } from './parse/lexer.js';
+export { LineCounter } from './parse/line-counter.js';
+export { Parser } from './parse/parser.js';
+export { EmptyStream, parse, parseAllDocuments, parseDocument, stringify } from './public-api.js';
+export type { TagId, Tags } from './schema/tags';
+export type { CollectionTag, ScalarTag } from './schema/types';
+export { asyncVisitor, asyncVisitorFn, visit, visitAsync, visitor, visitorFn } from './visit.js';
diff --git a/node_modules/yaml/dist/index.js b/node_modules/yaml/dist/index.js
new file mode 100644
index 0000000..b013fae
--- /dev/null
+++ b/node_modules/yaml/dist/index.js
@@ -0,0 +1,50 @@
+'use strict';
+
+var composer = require('./compose/composer.js');
+var Document = require('./doc/Document.js');
+var Schema = require('./schema/Schema.js');
+var errors = require('./errors.js');
+var Alias = require('./nodes/Alias.js');
+var Node = require('./nodes/Node.js');
+var Pair = require('./nodes/Pair.js');
+var Scalar = require('./nodes/Scalar.js');
+var YAMLMap = require('./nodes/YAMLMap.js');
+var YAMLSeq = require('./nodes/YAMLSeq.js');
+var cst = require('./parse/cst.js');
+var lexer = require('./parse/lexer.js');
+var lineCounter = require('./parse/line-counter.js');
+var parser = require('./parse/parser.js');
+var publicApi = require('./public-api.js');
+var visit = require('./visit.js');
+
+
+
+exports.Composer = composer.Composer;
+exports.Document = Document.Document;
+exports.Schema = Schema.Schema;
+exports.YAMLError = errors.YAMLError;
+exports.YAMLParseError = errors.YAMLParseError;
+exports.YAMLWarning = errors.YAMLWarning;
+exports.Alias = Alias.Alias;
+exports.isAlias = Node.isAlias;
+exports.isCollection = Node.isCollection;
+exports.isDocument = Node.isDocument;
+exports.isMap = Node.isMap;
+exports.isNode = Node.isNode;
+exports.isPair = Node.isPair;
+exports.isScalar = Node.isScalar;
+exports.isSeq = Node.isSeq;
+exports.Pair = Pair.Pair;
+exports.Scalar = Scalar.Scalar;
+exports.YAMLMap = YAMLMap.YAMLMap;
+exports.YAMLSeq = YAMLSeq.YAMLSeq;
+exports.CST = cst;
+exports.Lexer = lexer.Lexer;
+exports.LineCounter = lineCounter.LineCounter;
+exports.Parser = parser.Parser;
+exports.parse = publicApi.parse;
+exports.parseAllDocuments = publicApi.parseAllDocuments;
+exports.parseDocument = publicApi.parseDocument;
+exports.stringify = publicApi.stringify;
+exports.visit = visit.visit;
+exports.visitAsync = visit.visitAsync;
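The re-exports above make up the package's public entry point. A minimal round trip through it (a sketch, assuming the module resolves as `yaml`):

    const YAML = require('yaml');

    // Plain data <-> YAML text
    const data = YAML.parse('greeting: hello\nitems:\n  - 1\n  - 2\n');
    console.log(data);                       // { greeting: 'hello', items: [ 1, 2 ] }
    console.log(YAML.stringify(data));

    // Document level: keeps nodes, comments and errors available
    const doc = YAML.parseDocument('greeting: hello # keep me\n');
    console.log(doc.errors);                 // []
    console.log(doc.get('greeting'));        // 'hello'
    console.log(String(doc));                // round-trips the comment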
diff --git a/node_modules/yaml/dist/log.d.ts b/node_modules/yaml/dist/log.d.ts
new file mode 100644
index 0000000..d5f4e07
--- /dev/null
+++ b/node_modules/yaml/dist/log.d.ts
@@ -0,0 +1,3 @@
+export declare type LogLevelId = 'silent' | 'error' | 'warn' | 'debug';
+export declare function debug(logLevel: LogLevelId, ...messages: any[]): void;
+export declare function warn(logLevel: LogLevelId, warning: string | Error): void;
diff --git a/node_modules/yaml/dist/log.js b/node_modules/yaml/dist/log.js
new file mode 100644
index 0000000..fac7d5a
--- /dev/null
+++ b/node_modules/yaml/dist/log.js
@@ -0,0 +1,17 @@
+'use strict';
+
+function debug(logLevel, ...messages) {
+ if (logLevel === 'debug')
+ console.log(...messages);
+}
+function warn(logLevel, warning) {
+ if (logLevel === 'debug' || logLevel === 'warn') {
+ if (typeof process !== 'undefined' && process.emitWarning)
+ process.emitWarning(warning);
+ else
+ console.warn(warning);
+ }
+}
+
+exports.debug = debug;
+exports.warn = warn;
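These helpers are internal, but their switch is driven directly by the `logLevel` option documented later in this commit. A minimal sketch of the behaviour implemented above (the relative require path is how a sibling module inside `yaml/dist` would load it):

    const { debug, warn } = require('./log.js');

    debug('debug', 'composer state:', 42);   // printed only when logLevel === 'debug'
    warn('warn', new Error('deprecated'));   // forwarded to process.emitWarning()
    warn('silent', 'ignored');               // suppressed entirely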
diff --git a/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js b/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js
new file mode 100644
index 0000000..a7580c3
--- /dev/null
+++ b/node_modules/yaml/dist/node_modules/tslib/tslib.es6.js
@@ -0,0 +1,76 @@
+'use strict';
+
+/*! *****************************************************************************
+Copyright (c) Microsoft Corporation.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+***************************************************************************** */
+/* global Reflect, Promise */
+
+var extendStatics = function(d, b) {
+ extendStatics = Object.setPrototypeOf ||
+ ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+ function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+ return extendStatics(d, b);
+};
+
+function __extends(d, b) {
+ if (typeof b !== "function" && b !== null)
+ throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+ extendStatics(d, b);
+ function __() { this.constructor = d; }
+ d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+}
+
+function __generator(thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+}
+
+function __values(o) {
+ var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return { value: o && o[i++], done: !o };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+}
+
+exports.__extends = __extends;
+exports.__generator = __generator;
+exports.__values = __values;
diff --git a/node_modules/yaml/dist/nodes/Alias.d.ts b/node_modules/yaml/dist/nodes/Alias.d.ts
new file mode 100644
index 0000000..cdb0f42
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Alias.d.ts
@@ -0,0 +1,28 @@
+import type { Document } from '../doc/Document.js';
+import type { FlowScalar } from '../parse/cst.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { NodeBase, Range } from './Node.js';
+import type { Scalar } from './Scalar';
+import type { ToJSContext } from './toJS.js';
+import type { YAMLMap } from './YAMLMap.js';
+import type { YAMLSeq } from './YAMLSeq.js';
+export declare namespace Alias {
+ interface Parsed extends Alias {
+ range: Range;
+ srcToken?: FlowScalar & {
+ type: 'alias';
+ };
+ }
+}
+export declare class Alias extends NodeBase {
+ source: string;
+ anchor?: never;
+ constructor(source: string);
+ /**
+ * Resolve the value of this alias within `doc`, finding the last
+ * instance of the `source` anchor before this node.
+ */
+ resolve(doc: Document): Scalar | YAMLMap | YAMLSeq | undefined;
+ toJSON(_arg?: unknown, ctx?: ToJSContext): unknown;
+ toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Alias.js b/node_modules/yaml/dist/nodes/Alias.js
new file mode 100644
index 0000000..75853cf
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Alias.js
@@ -0,0 +1,96 @@
+'use strict';
+
+var anchors = require('../doc/anchors.js');
+var visit = require('../visit.js');
+var Node = require('./Node.js');
+
+class Alias extends Node.NodeBase {
+ constructor(source) {
+ super(Node.ALIAS);
+ this.source = source;
+ Object.defineProperty(this, 'tag', {
+ set() {
+ throw new Error('Alias nodes cannot have tags');
+ }
+ });
+ }
+ /**
+ * Resolve the value of this alias within `doc`, finding the last
+ * instance of the `source` anchor before this node.
+ */
+ resolve(doc) {
+ let found = undefined;
+ visit.visit(doc, {
+ Node: (_key, node) => {
+ if (node === this)
+ return visit.visit.BREAK;
+ if (node.anchor === this.source)
+ found = node;
+ }
+ });
+ return found;
+ }
+ toJSON(_arg, ctx) {
+ if (!ctx)
+ return { source: this.source };
+ const { anchors, doc, maxAliasCount } = ctx;
+ const source = this.resolve(doc);
+ if (!source) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new ReferenceError(msg);
+ }
+ const data = anchors.get(source);
+ /* istanbul ignore if */
+ if (!data || data.res === undefined) {
+ const msg = 'This should not happen: Alias anchor was not resolved?';
+ throw new ReferenceError(msg);
+ }
+ if (maxAliasCount >= 0) {
+ data.count += 1;
+ if (data.aliasCount === 0)
+ data.aliasCount = getAliasCount(doc, source, anchors);
+ if (data.count * data.aliasCount > maxAliasCount) {
+ const msg = 'Excessive alias count indicates a resource exhaustion attack';
+ throw new ReferenceError(msg);
+ }
+ }
+ return data.res;
+ }
+ toString(ctx, _onComment, _onChompKeep) {
+ const src = `*${this.source}`;
+ if (ctx) {
+ anchors.anchorIsValid(this.source);
+ if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
+ const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
+ throw new Error(msg);
+ }
+ if (ctx.implicitKey)
+ return `${src} `;
+ }
+ return src;
+ }
+}
+function getAliasCount(doc, node, anchors) {
+ if (Node.isAlias(node)) {
+ const source = node.resolve(doc);
+ const anchor = anchors && source && anchors.get(source);
+ return anchor ? anchor.count * anchor.aliasCount : 0;
+ }
+ else if (Node.isCollection(node)) {
+ let count = 0;
+ for (const item of node.items) {
+ const c = getAliasCount(doc, item, anchors);
+ if (c > count)
+ count = c;
+ }
+ return count;
+ }
+ else if (Node.isPair(node)) {
+ const kc = getAliasCount(doc, node.key, anchors);
+ const vc = getAliasCount(doc, node.value, anchors);
+ return Math.max(kc, vc);
+ }
+ return 1;
+}
+
+exports.Alias = Alias;
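A sketch of how aliases behave through the public API (assuming the package is loaded as `yaml`): an alias resolves to the last matching anchor set before it, and `toJSON` enforces `maxAliasCount` to block resource-exhaustion documents.

    const YAML = require('yaml');

    const data = YAML.parse('base: &ref { a: 1 }\ncopy: *ref\n');
    console.log(data.copy);                  // { a: 1 }

    const doc = YAML.parseDocument('base: &ref 1\ncopy: *ref\n');
    const alias = doc.get('copy', true);     // keep the node instead of its JS value
    console.log(YAML.isAlias(alias));        // true
    console.log(alias.source);               // 'ref'
    console.log(alias.resolve(doc).value);   // 1

    // doc.toJS({ maxAliasCount: 0 })        // would throw: alias nodes disallowed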
diff --git a/node_modules/yaml/dist/nodes/Collection.d.ts b/node_modules/yaml/dist/nodes/Collection.d.ts
new file mode 100644
index 0000000..3515c8f
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Collection.d.ts
@@ -0,0 +1,73 @@
+import type { Schema } from '../schema/Schema.js';
+import { NodeBase, NODE_TYPE } from './Node.js';
+export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import("./Node.js").Node;
+export declare const isEmptyPath: (path: Iterable<unknown> | null | undefined) => boolean;
+export declare abstract class Collection extends NodeBase {
+ static maxFlowStringSingleLineLength: number;
+ schema: Schema | undefined;
+ [NODE_TYPE]: symbol;
+ items: unknown[];
+ /** An optional anchor on this node. Used by alias nodes. */
+ anchor?: string;
+ /**
+ * If true, stringify this and all child nodes using flow rather than
+ * block styles.
+ */
+ flow?: boolean;
+ constructor(type: symbol, schema?: Schema);
+ /**
+ * Create a copy of this collection.
+ *
+ * @param schema - If defined, overwrites the original's schema
+ */
+ clone(schema?: Schema): Collection;
+ /** Adds a value to the collection. */
+ abstract add(value: unknown): void;
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ abstract delete(key: unknown): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ abstract get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ */
+ abstract has(key: unknown): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ abstract set(key: unknown, value: unknown): void;
+ /**
+ * Adds a value to the collection. For `!!map` and `!!omap` the value must
+ * be a Pair instance or a `{ key, value }` object, which may not have a key
+ * that already exists in the map.
+ */
+ addIn(path: Iterable<unknown>, value: unknown): void;
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path: Iterable<unknown>): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path: Iterable<unknown>, keepScalar?: boolean): unknown;
+ hasAllNullValues(allowScalar?: boolean): boolean;
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ */
+ hasIn(path: Iterable<unknown>): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path: Iterable<unknown>, value: unknown): void;
+}
diff --git a/node_modules/yaml/dist/nodes/Collection.js b/node_modules/yaml/dist/nodes/Collection.js
new file mode 100644
index 0000000..3c56597
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Collection.js
@@ -0,0 +1,150 @@
+'use strict';
+
+var createNode = require('../doc/createNode.js');
+var Node = require('./Node.js');
+
+function collectionFromPath(schema, path, value) {
+ let v = value;
+ for (let i = path.length - 1; i >= 0; --i) {
+ const k = path[i];
+ if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
+ const a = [];
+ a[k] = v;
+ v = a;
+ }
+ else {
+ v = new Map([[k, v]]);
+ }
+ }
+ return createNode.createNode(v, undefined, {
+ aliasDuplicateObjects: false,
+ keepUndefined: false,
+ onAnchor: () => {
+ throw new Error('This should not happen, please report a bug.');
+ },
+ schema,
+ sourceObjects: new Map()
+ });
+}
+// null, undefined, or an empty non-string iterable (e.g. [])
+const isEmptyPath = (path) => path == null ||
+ (typeof path === 'object' && !!path[Symbol.iterator]().next().done);
+class Collection extends Node.NodeBase {
+ constructor(type, schema) {
+ super(type);
+ Object.defineProperty(this, 'schema', {
+ value: schema,
+ configurable: true,
+ enumerable: false,
+ writable: true
+ });
+ }
+ /**
+ * Create a copy of this collection.
+ *
+ * @param schema - If defined, overwrites the original's schema
+ */
+ clone(schema) {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (schema)
+ copy.schema = schema;
+ copy.items = copy.items.map(it => Node.isNode(it) || Node.isPair(it) ? it.clone(schema) : it);
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+ /**
+ * Adds a value to the collection. For `!!map` and `!!omap` the value must
+ * be a Pair instance or a `{ key, value }` object, which may not have a key
+ * that already exists in the map.
+ */
+ addIn(path, value) {
+ if (isEmptyPath(path))
+ this.add(value);
+ else {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ node.addIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+ /**
+ * Removes a value from the collection.
+ * @returns `true` if the item was found and removed.
+ */
+ deleteIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.delete(key);
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ return node.deleteIn(rest);
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ */
+ getIn(path, keepScalar) {
+ const [key, ...rest] = path;
+ const node = this.get(key, true);
+ if (rest.length === 0)
+ return !keepScalar && Node.isScalar(node) ? node.value : node;
+ else
+ return Node.isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
+ }
+ hasAllNullValues(allowScalar) {
+ return this.items.every(node => {
+ if (!Node.isPair(node))
+ return false;
+ const n = node.value;
+ return (n == null ||
+ (allowScalar &&
+ Node.isScalar(n) &&
+ n.value == null &&
+ !n.commentBefore &&
+ !n.comment &&
+ !n.tag));
+ });
+ }
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ */
+ hasIn(path) {
+ const [key, ...rest] = path;
+ if (rest.length === 0)
+ return this.has(key);
+ const node = this.get(key, true);
+ return Node.isCollection(node) ? node.hasIn(rest) : false;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ */
+ setIn(path, value) {
+ const [key, ...rest] = path;
+ if (rest.length === 0) {
+ this.set(key, value);
+ }
+ else {
+ const node = this.get(key, true);
+ if (Node.isCollection(node))
+ node.setIn(rest, value);
+ else if (node === undefined && this.schema)
+ this.set(key, collectionFromPath(this.schema, rest, value));
+ else
+ throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
+ }
+ }
+}
+Collection.maxFlowStringSingleLineLength = 60;
+
+exports.Collection = Collection;
+exports.collectionFromPath = collectionFromPath;
+exports.isEmptyPath = isEmptyPath;
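The `*In` methods above walk a path of keys and indices, creating any missing intermediate collections via `collectionFromPath`. A sketch using the Document-level wrappers (assuming the standard public API):

    const YAML = require('yaml');

    const doc = YAML.parseDocument('server:\n  ports:\n    - 8000\n');

    console.log(doc.getIn(['server', 'ports', 0]));   // 8000
    doc.setIn(['server', 'ports', 1], 9000);          // extends the existing sequence
    doc.setIn(['server', 'tls', 'enabled'], true);    // creates the missing `tls` map
    console.log(doc.hasIn(['server', 'tls']));        // true
    doc.deleteIn(['server', 'ports', 0]);
    console.log(String(doc));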
diff --git a/node_modules/yaml/dist/nodes/Node.d.ts b/node_modules/yaml/dist/nodes/Node.d.ts
new file mode 100644
index 0000000..719c65b
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Node.d.ts
@@ -0,0 +1,53 @@
+import type { Document } from '../doc/Document.js';
+import { Token } from '../parse/cst.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import type { Alias } from './Alias.js';
+import type { Pair } from './Pair.js';
+import type { Scalar } from './Scalar.js';
+import type { YAMLMap } from './YAMLMap.js';
+import type { YAMLSeq } from './YAMLSeq.js';
+export declare type Node = Alias | Scalar | YAMLMap | YAMLSeq;
+export declare type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed;
+export declare type Range = [number, number, number];
+export declare const ALIAS: unique symbol;
+export declare const DOC: unique symbol;
+export declare const MAP: unique symbol;
+export declare const PAIR: unique symbol;
+export declare const SCALAR: unique symbol;
+export declare const SEQ: unique symbol;
+export declare const NODE_TYPE: unique symbol;
+export declare const isAlias: (node: any) => node is Alias;
+export declare const isDocument: (node: any) => node is Document<unknown>;
+export declare const isMap: (node: any) => node is YAMLMap<unknown, unknown>;
+export declare const isPair: (node: any) => node is Pair<unknown, unknown>;
+export declare const isScalar: (node: any) => node is Scalar<unknown>;
+export declare const isSeq: (node: any) => node is YAMLSeq<unknown>;
+export declare function isCollection(node: any): node is YAMLMap | YAMLSeq;
+export declare function isNode(node: any): node is Node;
+export declare const hasAnchor: (node: unknown) => node is Scalar<unknown> | YAMLMap<unknown, unknown> | YAMLSeq<unknown>;
+export declare abstract class NodeBase {
+ readonly [NODE_TYPE]: symbol;
+ /** A comment on or immediately after this */
+ comment?: string | null;
+ /** A comment before this */
+ commentBefore?: string | null;
+ /**
+ * The `[start, value-end, node-end]` character offsets for the part of the
+ * source parsed into this node (undefined if not parsed). The `value-end`
+ * and `node-end` positions are themselves not included in their respective
+ * ranges.
+ */
+ range?: Range | null;
+ /** A blank line before this node and its commentBefore */
+ spaceBefore?: boolean;
+ /** The CST token that was composed into this node. */
+ srcToken?: Token;
+ /** A fully qualified tag, if required */
+ tag?: string;
+ /** A plain JS representation of this node */
+ abstract toJSON(): any;
+ abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+ constructor(type: symbol);
+ /** Create a copy of this node. */
+ clone(): NodeBase;
+}
diff --git a/node_modules/yaml/dist/nodes/Node.js b/node_modules/yaml/dist/nodes/Node.js
new file mode 100644
index 0000000..4ddff6a
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Node.js
@@ -0,0 +1,66 @@
+'use strict';
+
+const ALIAS = Symbol.for('yaml.alias');
+const DOC = Symbol.for('yaml.document');
+const MAP = Symbol.for('yaml.map');
+const PAIR = Symbol.for('yaml.pair');
+const SCALAR = Symbol.for('yaml.scalar');
+const SEQ = Symbol.for('yaml.seq');
+const NODE_TYPE = Symbol.for('yaml.node.type');
+const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
+const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
+const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
+const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
+const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;
+const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
+function isCollection(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case MAP:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+function isNode(node) {
+ if (node && typeof node === 'object')
+ switch (node[NODE_TYPE]) {
+ case ALIAS:
+ case MAP:
+ case SCALAR:
+ case SEQ:
+ return true;
+ }
+ return false;
+}
+const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;
+class NodeBase {
+ constructor(type) {
+ Object.defineProperty(this, NODE_TYPE, { value: type });
+ }
+ /** Create a copy of this node. */
+ clone() {
+ const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
+ if (this.range)
+ copy.range = this.range.slice();
+ return copy;
+ }
+}
+
+exports.ALIAS = ALIAS;
+exports.DOC = DOC;
+exports.MAP = MAP;
+exports.NODE_TYPE = NODE_TYPE;
+exports.NodeBase = NodeBase;
+exports.PAIR = PAIR;
+exports.SCALAR = SCALAR;
+exports.SEQ = SEQ;
+exports.hasAnchor = hasAnchor;
+exports.isAlias = isAlias;
+exports.isCollection = isCollection;
+exports.isDocument = isDocument;
+exports.isMap = isMap;
+exports.isNode = isNode;
+exports.isPair = isPair;
+exports.isScalar = isScalar;
+exports.isSeq = isSeq;
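The guards above discriminate node kinds via the `NODE_TYPE` symbol, which is also what makes visitor-style traversal practical. A short sketch (assuming the package's public `visit`):

    const YAML = require('yaml');

    const doc = YAML.parseDocument('name: demo\ntags: [a, b]\n');

    console.log(YAML.isMap(doc.contents));            // true
    console.log(YAML.isSeq(doc.get('tags', true)));   // true (keep the node, not its JS value)

    YAML.visit(doc, {
      Scalar(key, node) {
        console.log(key, node.value);                 // 'key' name, 'value' demo, 'key' tags, 0 a, 1 b
      }
    });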
diff --git a/node_modules/yaml/dist/nodes/Pair.d.ts b/node_modules/yaml/dist/nodes/Pair.d.ts
new file mode 100644
index 0000000..da3d968
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Pair.d.ts
@@ -0,0 +1,21 @@
+import { CreateNodeContext } from '../doc/createNode.js';
+import type { CollectionItem } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { addPairToJSMap } from './addPairToJSMap.js';
+import { NODE_TYPE } from './Node.js';
+import type { ToJSContext } from './toJS.js';
+export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair<import("./Node.js").Node, import("./Alias.js").Alias | import("./Scalar.js").Scalar<unknown> | import("./YAMLMap.js").YAMLMap<unknown, unknown> | import("./YAMLSeq.js").YAMLSeq<unknown>>;
+export declare class Pair<K = unknown, V = unknown> {
+ readonly [NODE_TYPE]: symbol;
+ /** Always Node or null when parsed, but can be set to anything. */
+ key: K;
+ /** Always Node or null when parsed, but can be set to anything. */
+ value: V | null;
+ /** The CST token that was composed into this pair. */
+ srcToken?: CollectionItem;
+ constructor(key: K, value?: V | null);
+ clone(schema?: Schema): Pair<K, V>;
+ toJSON(_?: unknown, ctx?: ToJSContext): ReturnType<typeof addPairToJSMap>;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Pair.js b/node_modules/yaml/dist/nodes/Pair.js
new file mode 100644
index 0000000..c0c22e9
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Pair.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var createNode = require('../doc/createNode.js');
+var stringifyPair = require('../stringify/stringifyPair.js');
+var addPairToJSMap = require('./addPairToJSMap.js');
+var Node = require('./Node.js');
+
+function createPair(key, value, ctx) {
+ const k = createNode.createNode(key, undefined, ctx);
+ const v = createNode.createNode(value, undefined, ctx);
+ return new Pair(k, v);
+}
+class Pair {
+ constructor(key, value = null) {
+ Object.defineProperty(this, Node.NODE_TYPE, { value: Node.PAIR });
+ this.key = key;
+ this.value = value;
+ }
+ clone(schema) {
+ let { key, value } = this;
+ if (Node.isNode(key))
+ key = key.clone(schema);
+ if (Node.isNode(value))
+ value = value.clone(schema);
+ return new Pair(key, value);
+ }
+ toJSON(_, ctx) {
+ const pair = (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ return addPairToJSMap.addPairToJSMap(ctx, pair, this);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.doc)
+ ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)
+ : JSON.stringify(this);
+ }
+}
+
+exports.Pair = Pair;
+exports.createPair = createPair;
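After parsing, every map entry is a `Pair` whose key and value are themselves nodes; plain JS keys and values are also accepted and only wrapped when stringified. A sketch:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('answer: 42\n');
    const pair = doc.contents.items[0];

    console.log(YAML.isPair(pair));          // true
    console.log(pair.key.value);             // 'answer' (a Scalar node)
    console.log(pair.value.value);           // 42       (a Scalar node)

    doc.contents.items.push(new YAML.Pair('question', 'unknown'));
    console.log(String(doc));                // answer: 42\nquestion: unknown\n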
diff --git a/node_modules/yaml/dist/nodes/Scalar.d.ts b/node_modules/yaml/dist/nodes/Scalar.d.ts
new file mode 100644
index 0000000..dd330b2
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Scalar.d.ts
@@ -0,0 +1,42 @@
+import type { BlockScalar, FlowScalar } from '../parse/cst.js';
+import { NodeBase, Range } from './Node.js';
+import { ToJSContext } from './toJS.js';
+export declare const isScalarValue: (value: unknown) => boolean;
+export declare namespace Scalar {
+ interface Parsed extends Scalar {
+ range: Range;
+ source: string;
+ srcToken?: FlowScalar | BlockScalar;
+ }
+ type BLOCK_FOLDED = 'BLOCK_FOLDED';
+ type BLOCK_LITERAL = 'BLOCK_LITERAL';
+ type PLAIN = 'PLAIN';
+ type QUOTE_DOUBLE = 'QUOTE_DOUBLE';
+ type QUOTE_SINGLE = 'QUOTE_SINGLE';
+ type Type = BLOCK_FOLDED | BLOCK_LITERAL | PLAIN | QUOTE_DOUBLE | QUOTE_SINGLE;
+}
+export declare class Scalar<T = unknown> extends NodeBase {
+ static readonly BLOCK_FOLDED = "BLOCK_FOLDED";
+ static readonly BLOCK_LITERAL = "BLOCK_LITERAL";
+ static readonly PLAIN = "PLAIN";
+ static readonly QUOTE_DOUBLE = "QUOTE_DOUBLE";
+ static readonly QUOTE_SINGLE = "QUOTE_SINGLE";
+ value: T;
+ /** An optional anchor on this node. Used by alias nodes. */
+ anchor?: string;
+ /**
+ * By default (undefined), numbers use decimal notation.
+ * The YAML 1.2 core schema only supports 'HEX' and 'OCT'.
+ * The YAML 1.1 schema also supports 'BIN' and 'TIME'
+ */
+ format?: string;
+    /** If `value` is a number, include at least this many digits after the decimal point when stringifying it. */
+ minFractionDigits?: number;
+ /** Set during parsing to the source string value */
+ source?: string;
+ /** The scalar style used for the node's string representation */
+ type?: Scalar.Type;
+ constructor(value: T);
+ toJSON(arg?: any, ctx?: ToJSContext): any;
+ toString(): string;
+}
diff --git a/node_modules/yaml/dist/nodes/Scalar.js b/node_modules/yaml/dist/nodes/Scalar.js
new file mode 100644
index 0000000..2d46471
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/Scalar.js
@@ -0,0 +1,26 @@
+'use strict';
+
+var Node = require('./Node.js');
+var toJS = require('./toJS.js');
+
+const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
+class Scalar extends Node.NodeBase {
+ constructor(value) {
+ super(Node.SCALAR);
+ this.value = value;
+ }
+ toJSON(arg, ctx) {
+ return (ctx === null || ctx === void 0 ? void 0 : ctx.keep) ? this.value : toJS.toJS(this.value, arg, ctx);
+ }
+ toString() {
+ return String(this.value);
+ }
+}
+Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
+Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
+Scalar.PLAIN = 'PLAIN';
+Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
+Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
+
+exports.Scalar = Scalar;
+exports.isScalarValue = isScalarValue;
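A `Scalar` carries its value plus presentation hints (`type`, `format`, `minFractionDigits`) that are honoured when the document is stringified. A sketch (assuming the core schema's handling of `format: 'HEX'`):

    const YAML = require('yaml');

    const doc = new YAML.Document({ name: 'demo', mask: 255 });

    const name = doc.get('name', true);      // keepScalar: true -> the Scalar node itself
    name.type = YAML.Scalar.QUOTE_DOUBLE;    // force "demo" rather than plain demo

    const mask = doc.get('mask', true);
    mask.format = 'HEX';                     // core-schema integers render as 0xff

    console.log(String(doc));
    // name: "demo"
    // mask: 0xff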
diff --git a/node_modules/yaml/dist/nodes/YAMLMap.d.ts b/node_modules/yaml/dist/nodes/YAMLMap.d.ts
new file mode 100644
index 0000000..f13d7e5
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLMap.d.ts
@@ -0,0 +1,43 @@
+import type { BlockMap, FlowCollection } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { Collection } from './Collection.js';
+import { ParsedNode, Range } from './Node.js';
+import { Pair } from './Pair.js';
+import type { ToJSContext } from './toJS.js';
+export declare function findPair<K = unknown, V = unknown>(items: Iterable<Pair<K, V>>, key: unknown): Pair<K, V> | undefined;
+export declare namespace YAMLMap {
+ interface Parsed<K extends ParsedNode = ParsedNode, V extends ParsedNode | null = ParsedNode | null> extends YAMLMap<K, V> {
+ items: Pair<K, V>[];
+ range: Range;
+ srcToken?: BlockMap | FlowCollection;
+ }
+}
+export declare class YAMLMap<K = unknown, V = unknown> extends Collection {
+ static get tagName(): 'tag:yaml.org,2002:map';
+ items: Pair<K, V>[];
+ constructor(schema?: Schema);
+ /**
+ * Adds a value to the collection.
+ *
+ * @param overwrite - If not set `true`, using a key that is already in the
+ * collection will throw. Otherwise, overwrites the previous value.
+ */
+ add(pair: Pair<K, V> | {
+ key: K;
+ value: V;
+ }, overwrite?: boolean): void;
+ delete(key: K): boolean;
+ get(key: K, keepScalar?: boolean): unknown;
+ has(key: K): boolean;
+ set(key: K, value: V): void;
+ /**
+ * @param ctx - Conversion context, originally set in Document#toJS()
+ * @param {Class} Type - If set, forces the returned collection type
+ * @returns Instance of Type, Map, or Object
+ */
+ toJSON<T = unknown>(_?: unknown, ctx?: ToJSContext, Type?: {
+ new (): T;
+ }): any;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/YAMLMap.js b/node_modules/yaml/dist/nodes/YAMLMap.js
new file mode 100644
index 0000000..9b4a990
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLMap.js
@@ -0,0 +1,120 @@
+'use strict';
+
+var stringifyCollection = require('../stringify/stringifyCollection.js');
+var addPairToJSMap = require('./addPairToJSMap.js');
+var Collection = require('./Collection.js');
+var Node = require('./Node.js');
+var Pair = require('./Pair.js');
+var Scalar = require('./Scalar.js');
+
+function findPair(items, key) {
+ const k = Node.isScalar(key) ? key.value : key;
+ for (const it of items) {
+ if (Node.isPair(it)) {
+ if (it.key === key || it.key === k)
+ return it;
+ if (Node.isScalar(it.key) && it.key.value === k)
+ return it;
+ }
+ }
+ return undefined;
+}
+class YAMLMap extends Collection.Collection {
+ constructor(schema) {
+ super(Node.MAP, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:map';
+ }
+ /**
+ * Adds a value to the collection.
+ *
+ * @param overwrite - If not set `true`, using a key that is already in the
+ * collection will throw. Otherwise, overwrites the previous value.
+ */
+ add(pair, overwrite) {
+ var _a;
+ let _pair;
+ if (Node.isPair(pair))
+ _pair = pair;
+ else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
+ // In TypeScript, this never happens.
+ _pair = new Pair.Pair(pair, pair.value);
+ }
+ else
+ _pair = new Pair.Pair(pair.key, pair.value);
+ const prev = findPair(this.items, _pair.key);
+ const sortEntries = (_a = this.schema) === null || _a === void 0 ? void 0 : _a.sortMapEntries;
+ if (prev) {
+ if (!overwrite)
+ throw new Error(`Key ${_pair.key} already set`);
+ // For scalars, keep the old node & its comments and anchors
+ if (Node.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))
+ prev.value.value = _pair.value;
+ else
+ prev.value = _pair.value;
+ }
+ else if (sortEntries) {
+ const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
+ if (i === -1)
+ this.items.push(_pair);
+ else
+ this.items.splice(i, 0, _pair);
+ }
+ else {
+ this.items.push(_pair);
+ }
+ }
+ delete(key) {
+ const it = findPair(this.items, key);
+ if (!it)
+ return false;
+ const del = this.items.splice(this.items.indexOf(it), 1);
+ return del.length > 0;
+ }
+ get(key, keepScalar) {
+ const it = findPair(this.items, key);
+ const node = it === null || it === void 0 ? void 0 : it.value;
+ return !keepScalar && Node.isScalar(node) ? node.value : node;
+ }
+ has(key) {
+ return !!findPair(this.items, key);
+ }
+ set(key, value) {
+ this.add(new Pair.Pair(key, value), true);
+ }
+ /**
+ * @param ctx - Conversion context, originally set in Document#toJS()
+ * @param {Class} Type - If set, forces the returned collection type
+ * @returns Instance of Type, Map, or Object
+ */
+ toJSON(_, ctx, Type) {
+ const map = Type ? new Type() : (ctx === null || ctx === void 0 ? void 0 : ctx.mapAsMap) ? new Map() : {};
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const item of this.items)
+ addPairToJSMap.addPairToJSMap(ctx, map, item);
+ return map;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ for (const item of this.items) {
+ if (!Node.isPair(item))
+ throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
+ }
+ if (!ctx.allNullValues && this.hasAllNullValues(false))
+ ctx = Object.assign({}, ctx, { allNullValues: true });
+ return stringifyCollection.stringifyCollection(this, ctx, {
+ blockItemPrefix: '',
+ flowChars: { start: '{', end: '}' },
+ itemIndent: ctx.indent || '',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+
+exports.YAMLMap = YAMLMap;
+exports.findPair = findPair;
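Map items are `Pair`s, and `add`/`set`/`get`/`has`/`delete` compare keys by their scalar value via `findPair`. A sketch against a parsed document:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('a: 1\nb: 2\n');
    const map = doc.contents;            // a YAMLMap instance

    console.log(map.get('a'));           // 1 (unwrapped from its Scalar)
    console.log(map.has('b'));           // true

    map.set('a', 10);                    // keeps the node, replaces its scalar value
    map.add({ key: 'c', value: 3 });     // add() throws if the key already exists
    map.delete('b');

    console.log(String(doc));            // a: 10\nc: 3\n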
diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.d.ts b/node_modules/yaml/dist/nodes/YAMLSeq.d.ts
new file mode 100644
index 0000000..f892a97
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLSeq.d.ts
@@ -0,0 +1,55 @@
+import type { BlockSequence, FlowCollection } from '../parse/cst.js';
+import type { Schema } from '../schema/Schema.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+import { Collection } from './Collection.js';
+import { ParsedNode, Range } from './Node.js';
+import type { Pair } from './Pair.js';
+import { ToJSContext } from './toJS.js';
+export declare namespace YAMLSeq {
+ interface Parsed<T extends ParsedNode | Pair<ParsedNode, ParsedNode | null> = ParsedNode> extends YAMLSeq<T> {
+ items: T[];
+ range: Range;
+ srcToken?: BlockSequence | FlowCollection;
+ }
+}
+export declare class YAMLSeq<T = unknown> extends Collection {
+ static get tagName(): 'tag:yaml.org,2002:seq';
+ items: T[];
+ constructor(schema?: Schema);
+ add(value: T): void;
+ /**
+ * Removes a value from the collection.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ *
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key: unknown): boolean;
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ get(key: unknown, keepScalar?: boolean): unknown;
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ has(key: unknown): boolean;
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ *
+ * If `key` does not contain a representation of an integer, this will throw.
+ * It may be wrapped in a `Scalar`.
+ */
+ set(key: unknown, value: T): void;
+ toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
diff --git a/node_modules/yaml/dist/nodes/YAMLSeq.js b/node_modules/yaml/dist/nodes/YAMLSeq.js
new file mode 100644
index 0000000..fe5451a
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/YAMLSeq.js
@@ -0,0 +1,107 @@
+'use strict';
+
+var stringifyCollection = require('../stringify/stringifyCollection.js');
+var Collection = require('./Collection.js');
+var Node = require('./Node.js');
+var Scalar = require('./Scalar.js');
+var toJS = require('./toJS.js');
+
+class YAMLSeq extends Collection.Collection {
+ constructor(schema) {
+ super(Node.SEQ, schema);
+ this.items = [];
+ }
+ static get tagName() {
+ return 'tag:yaml.org,2002:seq';
+ }
+ add(value) {
+ this.items.push(value);
+ }
+ /**
+ * Removes a value from the collection.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ *
+ * @returns `true` if the item was found and removed.
+ */
+ delete(key) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return false;
+ const del = this.items.splice(idx, 1);
+ return del.length > 0;
+ }
+ /**
+ * Returns item at `key`, or `undefined` if not found. By default unwraps
+ * scalar values from their surrounding node; to disable set `keepScalar` to
+ * `true` (collections are always returned intact).
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ get(key, keepScalar) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ return undefined;
+ const it = this.items[idx];
+ return !keepScalar && Node.isScalar(it) ? it.value : it;
+ }
+ /**
+ * Checks if the collection includes a value with the key `key`.
+ *
+ * `key` must contain a representation of an integer for this to succeed.
+ * It may be wrapped in a `Scalar`.
+ */
+ has(key) {
+ const idx = asItemIndex(key);
+ return typeof idx === 'number' && idx < this.items.length;
+ }
+ /**
+ * Sets a value in this collection. For `!!set`, `value` needs to be a
+ * boolean to add/remove the item from the set.
+ *
+ * If `key` does not contain a representation of an integer, this will throw.
+ * It may be wrapped in a `Scalar`.
+ */
+ set(key, value) {
+ const idx = asItemIndex(key);
+ if (typeof idx !== 'number')
+ throw new Error(`Expected a valid index, not ${key}.`);
+ const prev = this.items[idx];
+ if (Node.isScalar(prev) && Scalar.isScalarValue(value))
+ prev.value = value;
+ else
+ this.items[idx] = value;
+ }
+ toJSON(_, ctx) {
+ const seq = [];
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(seq);
+ let i = 0;
+ for (const item of this.items)
+ seq.push(toJS.toJS(item, String(i++), ctx));
+ return seq;
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ return stringifyCollection.stringifyCollection(this, ctx, {
+ blockItemPrefix: '- ',
+ flowChars: { start: '[', end: ']' },
+ itemIndent: (ctx.indent || '') + ' ',
+ onChompKeep,
+ onComment
+ });
+ }
+}
+function asItemIndex(key) {
+ let idx = Node.isScalar(key) ? key.value : key;
+ if (idx && typeof idx === 'string')
+ idx = Number(idx);
+ return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
+ ? idx
+ : null;
+}
+
+exports.YAMLSeq = YAMLSeq;
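Sequence keys are item indices; `asItemIndex` also accepts numeric strings and `Scalar`-wrapped numbers. A sketch:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('- a\n- b\n');
    const seq = doc.contents;            // a YAMLSeq instance

    console.log(seq.get(0));             // 'a'
    console.log(seq.get('1'));           // 'b' (numeric string index)

    seq.add('c');
    seq.set(0, 'A');                     // replaces the scalar value in place
    seq.delete(1);                       // removes 'b'

    console.log(String(doc));            // - A\n- c\n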
diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts b/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
new file mode 100644
index 0000000..5b0bb11
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts
@@ -0,0 +1,3 @@
+import type { Pair } from './Pair.js';
+import { ToJSContext } from './toJS.js';
+export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>, { key, value }: Pair): Map<unknown, unknown> | Set<unknown> | Record<string | number | symbol, unknown>;
diff --git a/node_modules/yaml/dist/nodes/addPairToJSMap.js b/node_modules/yaml/dist/nodes/addPairToJSMap.js
new file mode 100644
index 0000000..5c0f755
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/addPairToJSMap.js
@@ -0,0 +1,106 @@
+'use strict';
+
+var log = require('../log.js');
+var stringify = require('../stringify/stringify.js');
+var Node = require('./Node.js');
+var Scalar = require('./Scalar.js');
+var toJS = require('./toJS.js');
+
+const MERGE_KEY = '<<';
+function addPairToJSMap(ctx, map, { key, value }) {
+ if ((ctx === null || ctx === void 0 ? void 0 : ctx.doc.schema.merge) && isMergeKey(key)) {
+ value = Node.isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (Node.isSeq(value))
+ for (const it of value.items)
+ mergeToJSMap(ctx, map, it);
+ else if (Array.isArray(value))
+ for (const it of value)
+ mergeToJSMap(ctx, map, it);
+ else
+ mergeToJSMap(ctx, map, value);
+ }
+ else {
+ const jsKey = toJS.toJS(key, '', ctx);
+ if (map instanceof Map) {
+ map.set(jsKey, toJS.toJS(value, jsKey, ctx));
+ }
+ else if (map instanceof Set) {
+ map.add(jsKey);
+ }
+ else {
+ const stringKey = stringifyKey(key, jsKey, ctx);
+ const jsValue = toJS.toJS(value, stringKey, ctx);
+ if (stringKey in map)
+ Object.defineProperty(map, stringKey, {
+ value: jsValue,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ else
+ map[stringKey] = jsValue;
+ }
+ }
+ return map;
+}
+const isMergeKey = (key) => key === MERGE_KEY ||
+ (Node.isScalar(key) &&
+ key.value === MERGE_KEY &&
+ (!key.type || key.type === Scalar.Scalar.PLAIN));
+// If the value associated with a merge key is a single mapping node, each of
+// its key/value pairs is inserted into the current mapping, unless the key
+// already exists in it. If the value associated with the merge key is a
+// sequence, then this sequence is expected to contain mapping nodes and each
+// of these nodes is merged in turn according to its order in the sequence.
+// Keys in mapping nodes earlier in the sequence override keys specified in
+// later mapping nodes. -- http://yaml.org/type/merge.html
+function mergeToJSMap(ctx, map, value) {
+ const source = ctx && Node.isAlias(value) ? value.resolve(ctx.doc) : value;
+ if (!Node.isMap(source))
+ throw new Error('Merge sources must be maps or map aliases');
+ const srcMap = source.toJSON(null, ctx, Map);
+ for (const [key, value] of srcMap) {
+ if (map instanceof Map) {
+ if (!map.has(key))
+ map.set(key, value);
+ }
+ else if (map instanceof Set) {
+ map.add(key);
+ }
+ else if (!Object.prototype.hasOwnProperty.call(map, key)) {
+ Object.defineProperty(map, key, {
+ value,
+ writable: true,
+ enumerable: true,
+ configurable: true
+ });
+ }
+ }
+ return map;
+}
+function stringifyKey(key, jsKey, ctx) {
+ if (jsKey === null)
+ return '';
+ if (typeof jsKey !== 'object')
+ return String(jsKey);
+ if (Node.isNode(key) && ctx && ctx.doc) {
+ const strCtx = stringify.createStringifyContext(ctx.doc, {});
+ strCtx.anchors = new Set();
+ for (const node of ctx.anchors.keys())
+ strCtx.anchors.add(node.anchor);
+ strCtx.inFlow = true;
+ strCtx.inStringifyKey = true;
+ const strKey = key.toString(strCtx);
+ if (!ctx.mapKeyWarned) {
+ let jsonStr = JSON.stringify(strKey);
+ if (jsonStr.length > 40)
+ jsonStr = jsonStr.substring(0, 36) + '..."';
+ log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
+ ctx.mapKeyWarned = true;
+ }
+ return strKey;
+ }
+ return JSON.stringify(jsKey);
+}
+
+exports.addPairToJSMap = addPairToJSMap;
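The merge-key behaviour described above is what `<<` does when the schema enables merging (the default only for YAML 1.1). A sketch (assuming the `merge` schema option documented later in this commit):

    const YAML = require('yaml');

    const src = [
      'defaults: &defaults { port: 8080, debug: false }',
      'prod:',
      '  <<: *defaults',
      '  debug: true',
      ''
    ].join('\n');

    const data = YAML.parse(src, { merge: true });
    console.log(data.prod);              // { port: 8080, debug: true } - the explicit key wins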
diff --git a/node_modules/yaml/dist/nodes/toJS.d.ts b/node_modules/yaml/dist/nodes/toJS.d.ts
new file mode 100644
index 0000000..85d794d
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/toJS.d.ts
@@ -0,0 +1,30 @@
+import type { Document } from '../doc/Document.js';
+import type { stringify } from '../stringify/stringify.js';
+import { Node } from './Node.js';
+export interface AnchorData {
+ aliasCount: number;
+ count: number;
+ res: unknown;
+}
+export interface ToJSContext {
+ anchors: Map<Node, AnchorData>;
+ doc: Document;
+ keep: boolean;
+ mapAsMap: boolean;
+ mapKeyWarned: boolean;
+ maxAliasCount: number;
+ onCreate?: (res: unknown) => void;
+ /** Requiring this directly in Pair would create circular dependencies */
+ stringify: typeof stringify;
+}
+/**
+ * Recursively convert any node or its contents to native JavaScript
+ *
+ * @param value - The input value
+ * @param arg - If `value` defines a `toJSON()` method, use this
+ * as its first argument
+ * @param ctx - Conversion context, originally set in Document#toJS(). If
+ * `{ keep: true }` is not set, output should be suitable for JSON
+ * stringification.
+ */
+export declare function toJS(value: any, arg: string | null, ctx?: ToJSContext): any;
diff --git a/node_modules/yaml/dist/nodes/toJS.js b/node_modules/yaml/dist/nodes/toJS.js
new file mode 100644
index 0000000..b155e6c
--- /dev/null
+++ b/node_modules/yaml/dist/nodes/toJS.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var Node = require('./Node.js');
+
+/**
+ * Recursively convert any node or its contents to native JavaScript
+ *
+ * @param value - The input value
+ * @param arg - If `value` defines a `toJSON()` method, use this
+ * as its first argument
+ * @param ctx - Conversion context, originally set in Document#toJS(). If
+ * `{ keep: true }` is not set, output should be suitable for JSON
+ * stringification.
+ */
+function toJS(value, arg, ctx) {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-return
+ if (Array.isArray(value))
+ return value.map((v, i) => toJS(v, String(i), ctx));
+ if (value && typeof value.toJSON === 'function') {
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-call
+ if (!ctx || !Node.hasAnchor(value))
+ return value.toJSON(arg, ctx);
+ const data = { aliasCount: 0, count: 1, res: undefined };
+ ctx.anchors.set(value, data);
+ ctx.onCreate = res => {
+ data.res = res;
+ delete ctx.onCreate;
+ };
+ const res = value.toJSON(arg, ctx);
+ if (ctx.onCreate)
+ ctx.onCreate(res);
+ return res;
+ }
+ if (typeof value === 'bigint' && !(ctx === null || ctx === void 0 ? void 0 : ctx.keep))
+ return Number(value);
+ return value;
+}
+
+exports.toJS = toJS;
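`toJS` is the internal driver behind `Document#toJS()` and `parse()`; the `ToJSContext` fields surface as the user-facing `ToJSOptions`. A sketch of the most visible one, `mapAsMap`:

    const YAML = require('yaml');

    const doc = YAML.parseDocument('1: one\n2: two\n');

    console.log(doc.toJS());                     // { '1': 'one', '2': 'two' } - object keys become strings
    console.log(doc.toJS({ mapAsMap: true }));   // Map(2) { 1 => 'one', 2 => 'two' }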
diff --git a/node_modules/yaml/dist/options.d.ts b/node_modules/yaml/dist/options.d.ts
new file mode 100644
index 0000000..9444c2e
--- /dev/null
+++ b/node_modules/yaml/dist/options.d.ts
@@ -0,0 +1,330 @@
+import type { Reviver } from './doc/applyReviver.js';
+import type { Directives } from './doc/directives.js';
+import type { LogLevelId } from './log.js';
+import type { ParsedNode } from './nodes/Node.js';
+import type { Pair } from './nodes/Pair.js';
+import type { Scalar } from './nodes/Scalar.js';
+import type { LineCounter } from './parse/line-counter.js';
+import type { Schema } from './schema/Schema.js';
+import type { Tags } from './schema/tags.js';
+import type { CollectionTag, ScalarTag } from './schema/types.js';
+export declare type ParseOptions = {
+ /**
+ * Whether integers should be parsed into BigInt rather than number values.
+ *
+ * Default: `false`
+ *
+ * https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/BigInt
+ */
+ intAsBigInt?: boolean;
+ /**
+ * Include a `srcToken` value on each parsed `Node`, containing the CST token
+ * that was composed into this node.
+ *
+ * Default: `false`
+ */
+ keepSourceTokens?: boolean;
+ /**
+ * If set, newlines will be tracked, to allow for `lineCounter.linePos(offset)`
+ * to provide the `{ line, col }` positions within the input.
+ */
+ lineCounter?: LineCounter;
+ /**
+ * Include line/col position & node type directly in parse errors.
+ *
+ * Default: `true`
+ */
+ prettyErrors?: boolean;
+ /**
+     * Report errors that the YAML 1.2 spec requires, even where the content
+     * is unambiguous and could still be parsed.
+ *
+ * Default: `true`
+ */
+ strict?: boolean;
+ /**
+ * YAML requires map keys to be unique. By default, this is checked by
+ * comparing scalar values with `===`; deep equality is not checked for
+ * aliases or collections. If merge keys are enabled by the schema,
+ * multiple `<<` keys are allowed.
+ *
+ * Set `false` to disable, or provide your own comparator function to
+ * customise. The comparator will be passed two `ParsedNode` values, and
+ * is expected to return a `boolean` indicating their equality.
+ *
+ * Default: `true`
+ */
+ uniqueKeys?: boolean | ((a: ParsedNode, b: ParsedNode) => boolean);
+};
+export declare type DocumentOptions = {
+ /**
+ * Used internally by Composer. If set and includes an explicit version,
+ * that overrides the `version` option.
+ */
+ directives?: Directives;
+ /**
+ * Control the logging level during parsing
+ *
+ * Default: `'warn'`
+ */
+ logLevel?: LogLevelId;
+ /**
+ * The YAML version used by documents without a `%YAML` directive.
+ *
+ * Default: `"1.2"`
+ */
+ version?: '1.1' | '1.2' | 'next';
+};
+export declare type SchemaOptions = {
+ /**
+ * When parsing, warn about compatibility issues with the given schema.
+ * When stringifying, use scalar styles that are parsed correctly
+ * by the `compat` schema as well as the actual schema.
+ *
+ * Default: `null`
+ */
+ compat?: string | Tags | null;
+ /**
+ * Array of additional tags to include in the schema, or a function that may
+ * modify the schema's base tag array.
+ */
+ customTags?: Tags | ((tags: Tags) => Tags) | null;
+ /**
+ * Enable support for `<<` merge keys.
+ *
+ * Default: `false` for YAML 1.2, `true` for earlier versions
+ */
+ merge?: boolean;
+ /**
+ * When using the `'core'` schema, support parsing values with these
+ * explicit YAML 1.1 tags:
+ *
+ * `!!binary`, `!!omap`, `!!pairs`, `!!set`, `!!timestamp`.
+ *
+     * Default: `true`
+ */
+ resolveKnownTags?: boolean;
+ /**
+ * The base schema to use.
+ *
+ * The core library has built-in support for the following:
+ * - `'failsafe'`: A minimal schema that parses all scalars as strings
+ * - `'core'`: The YAML 1.2 core schema
+ * - `'json'`: The YAML 1.2 JSON schema, with minimal rules for JSON compatibility
+ * - `'yaml-1.1'`: The YAML 1.1 schema
+ *
+ * If using another (custom) schema, the `customTags` array needs to
+ * fully define the schema's tags.
+ *
+ * Default: `'core'` for YAML 1.2, `'yaml-1.1'` for earlier versions
+ */
+ schema?: string | Schema;
+ /**
+ * When adding to or stringifying a map, sort the entries.
+ * If `true`, sort by comparing key values with `<`.
+ * Does not affect item order when parsing.
+ *
+ * Default: `false`
+ */
+ sortMapEntries?: boolean | ((a: Pair, b: Pair) => number);
+ /**
+ * Override default values for `toString()` options.
+ */
+ toStringDefaults?: ToStringOptions;
+};
+export declare type CreateNodeOptions = {
+ /**
+ * During node construction, use anchors and aliases to keep strictly equal
+ * non-null objects as equivalent in YAML.
+ *
+ * Default: `true`
+ */
+ aliasDuplicateObjects?: boolean;
+ /**
+ * Default prefix for anchors.
+ *
+ * Default: `'a'`, resulting in anchors `a1`, `a2`, etc.
+ */
+ anchorPrefix?: string;
+ /** Force the top-level collection node to use flow style. */
+ flow?: boolean;
+ /**
+ * Keep `undefined` object values when creating mappings, rather than
+ * discarding them.
+ *
+ * Default: `false`
+ */
+ keepUndefined?: boolean | null;
+ onTagObj?: (tagObj: ScalarTag | CollectionTag) => void;
+ /**
+ * Specify the top-level collection type, e.g. `"!!omap"`. Note that this
+ * requires the corresponding tag to be available in this document's schema.
+ */
+ tag?: string;
+};
+export declare type ToJSOptions = {
+ /**
+ * Use Map rather than Object to represent mappings.
+ *
+ * Default: `false`
+ */
+ mapAsMap?: boolean;
+ /**
+ * Prevent exponential entity expansion attacks by limiting data aliasing count;
+ * set to `-1` to disable checks; `0` disallows all alias nodes.
+ *
+ * Default: `100`
+ */
+ maxAliasCount?: number;
+ /**
+ * If defined, called with the resolved `value` and reference `count` for
+ * each anchor in the document.
+ */
+ onAnchor?: (value: unknown, count: number) => void;
+ /**
+ * Optional function that may filter or modify the output JS value
+ *
+ * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter
+ */
+ reviver?: Reviver;
+};
+export declare type ToStringOptions = {
+ /**
+ * Use block quote styles for scalar values where applicable.
+ * Set to `false` to disable block quotes completely.
+ *
+ * Default: `true`
+ */
+ blockQuote?: boolean | 'folded' | 'literal';
+ /**
+ * Enforce `'block'` or `'flow'` style on maps and sequences.
+ * Empty collections will always be stringified as `{}` or `[]`.
+ *
+ * Default: `'any'`, allowing each node to set its style separately
+ * with its `flow: boolean` (default `false`) property.
+ */
+ collectionStyle?: 'any' | 'block' | 'flow';
+ /**
+ * Comment stringifier.
+ * Output should be valid for the current schema.
+ *
+ * By default, empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+ commentString?: (comment: string) => string;
+ /**
+ * The default type of string literal used to stringify implicit key values.
+ * Output may use other types if required to fully represent the value.
+ *
+ * If `null`, the value of `defaultStringType` is used.
+ *
+ * Default: `null`
+ */
+ defaultKeyType?: Scalar.Type | null;
+ /**
+ * The default type of string literal used to stringify values in general.
+ * Output may use other types if required to fully represent the value.
+ *
+ * Default: `'PLAIN'`
+ */
+ defaultStringType?: Scalar.Type;
+ /**
+ * Include directives in the output.
+ *
+ * - If `true`, at least the document-start marker `---` is always included.
+ * This does not force the `%YAML` directive to be included. To do that,
+ * set `doc.directives.yaml.explicit = true`.
+ * - If `false`, no directives or marker is ever included. If using the `%TAG`
+ * directive, you are expected to include it manually in the stream before
+ * its use.
+ * - If `null`, directives and marker may be included if required.
+ *
+ * Default: `null`
+ */
+ directives?: boolean | null;
+ /**
+ * Restrict double-quoted strings to use JSON-compatible syntax.
+ *
+ * Default: `false`
+ */
+ doubleQuotedAsJSON?: boolean;
+ /**
+ * Minimum length for double-quoted strings to use multiple lines to
+ * represent the value. Ignored if `doubleQuotedAsJSON` is set.
+ *
+ * Default: `40`
+ */
+ doubleQuotedMinMultiLineLength?: number;
+ /**
+ * String representation for `false`.
+ * With the core schema, use `'false'`, `'False'`, or `'FALSE'`.
+ *
+ * Default: `'false'`
+ */
+ falseStr?: string;
+ /**
+     * The number of spaces to use when indenting the output.
+ *
+ * Default: `2`
+ */
+ indent?: number;
+ /**
+ * Whether block sequences should be indented.
+ *
+ * Default: `true`
+ */
+ indentSeq?: boolean;
+ /**
+ * Maximum line width (set to `0` to disable folding).
+ *
+ * This is a soft limit, as only double-quoted semantics allow for inserting
+ * a line break in the middle of a word, as well as being influenced by the
+ * `minContentWidth` option.
+ *
+ * Default: `80`
+ */
+ lineWidth?: number;
+ /**
+ * Minimum line width for highly-indented content (set to `0` to disable).
+ *
+ * Default: `20`
+ */
+ minContentWidth?: number;
+ /**
+ * String representation for `null`.
+ * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty
+ * string `''`.
+ *
+ * Default: `'null'`
+ */
+ nullStr?: string;
+ /**
+ * Require keys to be scalars and to use implicit rather than explicit notation.
+ *
+ * Default: `false`
+ */
+ simpleKeys?: boolean;
+ /**
+ * Use 'single quote' rather than "double quote" where applicable.
+ * Set to `false` to disable single quotes completely.
+ *
+ * Default: `null`
+ */
+ singleQuote?: boolean | null;
+ /**
+ * String representation for `true`.
+ * With the core schema, use `'true'`, `'True'`, or `'TRUE'`.
+ *
+ * Default: `'true'`
+ */
+ trueStr?: string;
+ /**
+ * The anchor used by an alias must be defined before the alias node. As it's
+ * possible for the document to be modified manually, the order may be
+ * verified during stringification.
+ *
+     * Default: `true`
+ */
+ verifyAliasOrder?: boolean;
+};
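The option groups above are accepted as one merged bag by `parse`, `parseDocument`, `stringify` and the `Document` constructor. A few illustrative combinations (a sketch, not an exhaustive reference):

    const YAML = require('yaml');

    // ParseOptions: keep large integers exact
    const big = YAML.parse('id: 9007199254740993', { intAsBigInt: true });
    console.log(typeof big.id);                          // 'bigint'

    // ToJSOptions: represent mappings as Map to preserve key types
    console.log(YAML.parse('1: a', { mapAsMap: true })); // Map(1) { 1 => 'a' }

    // ToStringOptions: wider indent, folding disabled
    console.log(YAML.stringify({ list: ['a', 'b'] }, { indent: 4, lineWidth: 0 }));
    // list:
    //     - a
    //     - b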
diff --git a/node_modules/yaml/dist/parse/cst-scalar.d.ts b/node_modules/yaml/dist/parse/cst-scalar.d.ts
new file mode 100644
index 0000000..a7bd1d6
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-scalar.d.ts
@@ -0,0 +1,64 @@
+import { ErrorCode } from '../errors.js';
+import { Range } from '../nodes/Node.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst.js';
+/**
+ * If `token` is a CST flow or block scalar, determine its string value and a few other attributes.
+ * Otherwise, return `null`.
+ */
+export declare function resolveAsScalar(token: FlowScalar | BlockScalar, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
+ value: string;
+ type: Scalar.Type | null;
+ comment: string;
+ range: Range;
+};
+export declare function resolveAsScalar(token: Token | null | undefined, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): {
+ value: string;
+ type: Scalar.Type | null;
+ comment: string;
+ range: Range;
+} | null;
+/**
+ * Create a new scalar token with `value`
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.indent The indent level of the token.
+ * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
+ * @param context.offset The offset position of the token.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+export declare function createScalarToken(value: string, context: {
+ end?: SourceToken[];
+ implicitKey?: boolean;
+ indent: number;
+ inFlow?: boolean;
+ offset?: number;
+ type?: Scalar.Type;
+}): BlockScalar | FlowScalar;
+/**
+ * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
+ *
+ * Best efforts are made to retain any comments previously associated with the `token`,
+ * though all contents within a collection's `items` will be overwritten.
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+export declare function setScalarValue(token: Token, value: string, context?: {
+ afterKey?: boolean;
+ implicitKey?: boolean;
+ inFlow?: boolean;
+ type?: Scalar.Type;
+}): void;
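+
+// Illustrative sketch (not part of these declarations): create a standalone
+// block-literal scalar token for a multi-line value, e.g.
+//
+//   import { createScalarToken } from './cst-scalar.js'
+//   const token = createScalarToken('line 1\nline 2\n', { indent: 2, type: 'BLOCK_LITERAL' })
+//   // token.type should be 'block-scalar', with the indented body in token.source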
diff --git a/node_modules/yaml/dist/parse/cst-scalar.js b/node_modules/yaml/dist/parse/cst-scalar.js
new file mode 100644
index 0000000..6bee70e
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-scalar.js
@@ -0,0 +1,219 @@
+'use strict';
+
+var resolveBlockScalar = require('../compose/resolve-block-scalar.js');
+var resolveFlowScalar = require('../compose/resolve-flow-scalar.js');
+var errors = require('../errors.js');
+var stringifyString = require('../stringify/stringifyString.js');
+
+function resolveAsScalar(token, strict = true, onError) {
+ if (token) {
+ const _onError = (pos, code, message) => {
+ const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
+ if (onError)
+ onError(offset, code, message);
+ else
+ throw new errors.YAMLParseError([offset, offset + 1], code, message);
+ };
+ switch (token.type) {
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);
+ case 'block-scalar':
+ return resolveBlockScalar.resolveBlockScalar(token, strict, _onError);
+ }
+ }
+ return null;
+}
+/**
+ * Create a new scalar token with `value`
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.indent The indent level of the token.
+ * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
+ * @param context.offset The offset position of the token.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function createScalarToken(value, context) {
+ var _a;
+ const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
+ const source = stringifyString.stringifyString({ type, value }, {
+ implicitKey,
+ indent: indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ const end = (_a = context.end) !== null && _a !== void 0 ? _a : [
+ { type: 'newline', offset: -1, indent, source: '\n' }
+ ];
+ switch (source[0]) {
+ case '|':
+ case '>': {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, end))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ return { type: 'block-scalar', offset, indent, props, source: body };
+ }
+ case '"':
+ return { type: 'double-quoted-scalar', offset, indent, source, end };
+ case "'":
+ return { type: 'single-quoted-scalar', offset, indent, source, end };
+ default:
+ return { type: 'scalar', offset, indent, source, end };
+ }
+}
+/**
+ * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
+ *
+ * Best efforts are made to retain any comments previously associated with the `token`,
+ * though all contents within a collection's `items` will be overwritten.
+ *
+ * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
+ * as this function does not support any schema operations and won't check for such conflicts.
+ *
+ * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
+ * @param value The string representation of the value, which will have its content properly indented.
+ * @param context.afterKey In most cases, values after a key should have an additional level of indentation.
+ * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
+ * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
+ * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
+ */
+function setScalarValue(token, value, context = {}) {
+ let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
+ let indent = 'indent' in token ? token.indent : null;
+ if (afterKey && typeof indent === 'number')
+ indent += 2;
+ if (!type)
+ switch (token.type) {
+ case 'single-quoted-scalar':
+ type = 'QUOTE_SINGLE';
+ break;
+ case 'double-quoted-scalar':
+ type = 'QUOTE_DOUBLE';
+ break;
+ case 'block-scalar': {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
+ break;
+ }
+ default:
+ type = 'PLAIN';
+ }
+ const source = stringifyString.stringifyString({ type, value }, {
+ implicitKey: implicitKey || indent === null,
+ indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
+ inFlow,
+ options: { blockQuote: true, lineWidth: -1 }
+ });
+ switch (source[0]) {
+ case '|':
+ case '>':
+ setBlockScalarValue(token, source);
+ break;
+ case '"':
+ setFlowScalarValue(token, source, 'double-quoted-scalar');
+ break;
+ case "'":
+ setFlowScalarValue(token, source, 'single-quoted-scalar');
+ break;
+ default:
+ setFlowScalarValue(token, source, 'scalar');
+ }
+}
+function setBlockScalarValue(token, source) {
+ const he = source.indexOf('\n');
+ const head = source.substring(0, he);
+ const body = source.substring(he + 1) + '\n';
+ if (token.type === 'block-scalar') {
+ const header = token.props[0];
+ if (header.type !== 'block-scalar-header')
+ throw new Error('Invalid block scalar header');
+ header.source = head;
+ token.source = body;
+ }
+ else {
+ const { offset } = token;
+ const indent = 'indent' in token ? token.indent : -1;
+ const props = [
+ { type: 'block-scalar-header', offset, indent, source: head }
+ ];
+ if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
+ props.push({ type: 'newline', offset: -1, indent, source: '\n' });
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type: 'block-scalar', indent, props, source: body });
+ }
+}
+/** @returns `true` if last token is a newline */
+function addEndtoBlockProps(props, end) {
+ if (end)
+ for (const st of end)
+ switch (st.type) {
+ case 'space':
+ case 'comment':
+ props.push(st);
+ break;
+ case 'newline':
+ props.push(st);
+ return true;
+ }
+ return false;
+}
+function setFlowScalarValue(token, source, type) {
+ switch (token.type) {
+ case 'scalar':
+ case 'double-quoted-scalar':
+ case 'single-quoted-scalar':
+ token.type = type;
+ token.source = source;
+ break;
+ case 'block-scalar': {
+ const end = token.props.slice(1);
+ let oa = source.length;
+ if (token.props[0].type === 'block-scalar-header')
+ oa -= token.props[0].source.length;
+ for (const tok of end)
+ tok.offset += oa;
+ delete token.props;
+ Object.assign(token, { type, source, end });
+ break;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ const offset = token.offset + source.length;
+ const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
+ delete token.items;
+ Object.assign(token, { type, source, end: [nl] });
+ break;
+ }
+ default: {
+ const indent = 'indent' in token ? token.indent : -1;
+ const end = 'end' in token && Array.isArray(token.end)
+ ? token.end.filter(st => st.type === 'space' ||
+ st.type === 'comment' ||
+ st.type === 'newline')
+ : [];
+ for (const key of Object.keys(token))
+ if (key !== 'type' && key !== 'offset')
+ delete token[key];
+ Object.assign(token, { type, indent, source, end });
+ }
+ }
+}
+
+exports.createScalarToken = createScalarToken;
+exports.resolveAsScalar = resolveAsScalar;
+exports.setScalarValue = setScalarValue;
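+
+// Illustrative sketch (not part of the library source): given a CST produced
+// by the Parser in ./parser.js, a scalar token can be read with
+// resolveAsScalar() and rewritten in place with setScalarValue(), e.g.
+//
+//   const { Parser } = require('./parser.js');
+//   const [doc] = Array.from(new Parser().parse('foo: bar\n'));
+//   const token = doc.value.items[0].value;     // the plain scalar `bar`
+//   console.log(resolveAsScalar(token).value);  // 'bar'
+//   setScalarValue(token, 'baz', { afterKey: true });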
diff --git a/node_modules/yaml/dist/parse/cst-stringify.d.ts b/node_modules/yaml/dist/parse/cst-stringify.d.ts
new file mode 100644
index 0000000..dbf66d6
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-stringify.d.ts
@@ -0,0 +1,8 @@
+import type { CollectionItem, Token } from './cst.js';
+/**
+ * Stringify a CST document, token, or collection item
+ *
+ * Fair warning: This applies no validation whatsoever, and
+ * simply concatenates the sources in their logical order.
+ */
+export declare const stringify: (cst: Token | CollectionItem) => string;
diff --git a/node_modules/yaml/dist/parse/cst-stringify.js b/node_modules/yaml/dist/parse/cst-stringify.js
new file mode 100644
index 0000000..78e8c37
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-stringify.js
@@ -0,0 +1,63 @@
+'use strict';
+
+/**
+ * Stringify a CST document, token, or collection item
+ *
+ * Fair warning: This applies no validation whatsoever, and
+ * simply concatenates the sources in their logical order.
+ */
+const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
+function stringifyToken(token) {
+ switch (token.type) {
+ case 'block-scalar': {
+ let res = '';
+ for (const tok of token.props)
+ res += stringifyToken(tok);
+ return res + token.source;
+ }
+ case 'block-map':
+ case 'block-seq': {
+ let res = '';
+ for (const item of token.items)
+ res += stringifyItem(item);
+ return res;
+ }
+ case 'flow-collection': {
+ let res = token.start.source;
+ for (const item of token.items)
+ res += stringifyItem(item);
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ case 'document': {
+ let res = stringifyItem(token);
+ if (token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ default: {
+ let res = token.source;
+ if ('end' in token && token.end)
+ for (const st of token.end)
+ res += st.source;
+ return res;
+ }
+ }
+}
+function stringifyItem({ start, key, sep, value }) {
+ let res = '';
+ for (const st of start)
+ res += st.source;
+ if (key)
+ res += stringifyToken(key);
+ if (sep)
+ for (const st of sep)
+ res += st.source;
+ if (value)
+ res += stringifyToken(value);
+ return res;
+}
+
+exports.stringify = stringify;
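+
+// Illustrative sketch (not part of the library source): because stringify()
+// only re-concatenates token sources, parsing and then stringifying a
+// document should reproduce the original input exactly, e.g.
+//
+//   const { Parser } = require('./parser.js');
+//   const src = 'a: 1\nb: [2, 3]\n';
+//   const out = Array.from(new Parser().parse(src)).map(stringify).join('');
+//   // out === src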
diff --git a/node_modules/yaml/dist/parse/cst-visit.d.ts b/node_modules/yaml/dist/parse/cst-visit.d.ts
new file mode 100644
index 0000000..71c6029
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-visit.d.ts
@@ -0,0 +1,39 @@
+import type { CollectionItem, Document } from './cst.js';
+export declare type VisitPath = readonly ['key' | 'value', number][];
+export declare type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void;
+/**
+ * Apply a visitor to a CST document or item.
+ *
+ * Walks through the tree (depth-first) starting from the root, calling a
+ * `visitor` function with two arguments when entering each item:
+ * - `item`: The current item, which includes the following members:
+ * - `start: SourceToken[]` – Source tokens before the key or value,
+ * possibly including its anchor or tag.
+ * - `key?: Token | null` – Set for pair values. May then be `null`, if
+ * the key before the `:` separator is empty.
+ * - `sep?: SourceToken[]` – Source tokens between the key and the value,
+ * which should include the `:` map value indicator if `value` is set.
+ * - `value?: Token` – The value of a sequence item, or of a map pair.
+ * - `path`: The steps from the root to the current node, as an array of
+ * `['key' | 'value', number]` tuples.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this token, continue with
+ * next sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current item, then continue with the next one
+ * - `number`: Set the index of the next step. This is useful especially if
+ * the index of the current token has changed.
+ * - `function`: Define the next visitor for this item. After the original
+ * visitor is called on item entry, next visitors are called after handling
+ * a non-empty `key` and when exiting the item.
+ */
+export declare function visit(cst: Document | CollectionItem, visitor: Visitor): void;
+export declare namespace visit {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+ var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined;
+ var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => import("./cst.js").BlockMap | import("./cst.js").BlockSequence | import("./cst.js").FlowCollection;
+}
diff --git a/node_modules/yaml/dist/parse/cst-visit.js b/node_modules/yaml/dist/parse/cst-visit.js
new file mode 100644
index 0000000..dbc47bd
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst-visit.js
@@ -0,0 +1,99 @@
+'use strict';
+
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove item');
+/**
+ * Apply a visitor to a CST document or item.
+ *
+ * Walks through the tree (depth-first) starting from the root, calling a
+ * `visitor` function with two arguments when entering each item:
+ * - `item`: The current item, which includes the following members:
+ * - `start: SourceToken[]` – Source tokens before the key or value,
+ * possibly including its anchor or tag.
+ * - `key?: Token | null` – Set for pair values. May then be `null`, if
+ * the key before the `:` separator is empty.
+ * - `sep?: SourceToken[]` – Source tokens between the key and the value,
+ * which should include the `:` map value indicator if `value` is set.
+ * - `value?: Token` – The value of a sequence item, or of a map pair.
+ * - `path`: The steps from the root to the current node, as an array of
+ * `['key' | 'value', number]` tuples.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this token, continue with
+ * next sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current item, then continue with the next one
+ * - `number`: Set the index of the next step. This is useful especially if
+ * the index of the current token has changed.
+ * - `function`: Define the next visitor for this item. After the original
+ * visitor is called on item entry, next visitors are called after handling
+ * a non-empty `key` and when exiting the item.
+ */
+function visit(cst, visitor) {
+ if ('type' in cst && cst.type === 'document')
+ cst = { start: cst.start, value: cst.value };
+ _visit(Object.freeze([]), cst, visitor);
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current item */
+visit.SKIP = SKIP;
+/** Remove the current item */
+visit.REMOVE = REMOVE;
+/** Find the item at `path` from `cst` as the root */
+visit.itemAtPath = (cst, path) => {
+ let item = cst;
+ for (const [field, index] of path) {
+ const tok = item === null || item === void 0 ? void 0 : item[field];
+ if (tok && 'items' in tok) {
+ item = tok.items[index];
+ }
+ else
+ return undefined;
+ }
+ return item;
+};
+/**
+ * Get the immediate parent collection of the item at `path` from `cst` as the root.
+ *
+ * Throws an error if the collection is not found, which should never happen if the item itself exists.
+ */
+visit.parentCollection = (cst, path) => {
+ const parent = visit.itemAtPath(cst, path.slice(0, -1));
+ const field = path[path.length - 1][0];
+ const coll = parent === null || parent === void 0 ? void 0 : parent[field];
+ if (coll && 'items' in coll)
+ return coll;
+ throw new Error('Parent collection not found');
+};
+function _visit(path, item, visitor) {
+ let ctrl = visitor(item, path);
+ if (typeof ctrl === 'symbol')
+ return ctrl;
+ for (const field of ['key', 'value']) {
+ const token = item[field];
+ if (token && 'items' in token) {
+ for (let i = 0; i < token.items.length; ++i) {
+ const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ token.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ if (typeof ctrl === 'function' && field === 'key')
+ ctrl = ctrl(item, path);
+ }
+ }
+ return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
+}
+
+exports.visit = visit;
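+
+// Illustrative sketch (not part of the library source): walk a parsed
+// document and log the path and source of every scalar value, e.g.
+//
+//   const { Parser } = require('./parser.js');
+//   const [doc] = Array.from(new Parser().parse('a: 1\nb:\n  - 2\n'));
+//   visit(doc, (item, path) => {
+//     if (item.value && 'source' in item.value)
+//       console.log(JSON.stringify(path), item.value.source);
+//   });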
diff --git a/node_modules/yaml/dist/parse/cst.d.ts b/node_modules/yaml/dist/parse/cst.d.ts
new file mode 100644
index 0000000..e5dd4e7
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst.d.ts
@@ -0,0 +1,106 @@
+export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
+export { stringify } from './cst-stringify.js';
+export { visit, Visitor, VisitPath } from './cst-visit.js';
+export interface SourceToken {
+ type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header';
+ offset: number;
+ indent: number;
+ source: string;
+}
+export interface ErrorToken {
+ type: 'error';
+ offset: number;
+ source: string;
+ message: string;
+}
+export interface Directive {
+ type: 'directive';
+ offset: number;
+ source: string;
+}
+export interface Document {
+ type: 'document';
+ offset: number;
+ start: SourceToken[];
+ value?: Token;
+ end?: SourceToken[];
+}
+export interface DocumentEnd {
+ type: 'doc-end';
+ offset: number;
+ source: string;
+ end?: SourceToken[];
+}
+export interface FlowScalar {
+ type: 'alias' | 'scalar' | 'single-quoted-scalar' | 'double-quoted-scalar';
+ offset: number;
+ indent: number;
+ source: string;
+ end?: SourceToken[];
+}
+export interface BlockScalar {
+ type: 'block-scalar';
+ offset: number;
+ indent: number;
+ props: Token[];
+ source: string;
+}
+export interface BlockMap {
+ type: 'block-map';
+ offset: number;
+ indent: number;
+ items: Array<{
+ start: SourceToken[];
+ key?: never;
+ sep?: never;
+ value?: never;
+ } | {
+ start: SourceToken[];
+ key: Token | null;
+ sep: SourceToken[];
+ value?: Token;
+ }>;
+}
+export interface BlockSequence {
+ type: 'block-seq';
+ offset: number;
+ indent: number;
+ items: Array<{
+ start: SourceToken[];
+ key?: never;
+ sep?: never;
+ value?: Token;
+ }>;
+}
+export declare type CollectionItem = {
+ start: SourceToken[];
+ key?: Token | null;
+ sep?: SourceToken[];
+ value?: Token;
+};
+export interface FlowCollection {
+ type: 'flow-collection';
+ offset: number;
+ indent: number;
+ start: SourceToken;
+ items: CollectionItem[];
+ end: SourceToken[];
+}
+export declare type Token = SourceToken | ErrorToken | Directive | Document | DocumentEnd | FlowScalar | BlockScalar | BlockMap | BlockSequence | FlowCollection;
+export declare type TokenType = SourceToken['type'] | DocumentEnd['type'] | FlowScalar['type'];
+/** The byte order mark */
+export declare const BOM = "\uFEFF";
+/** Start of doc-mode */
+export declare const DOCUMENT = "\u0002";
+/** Unexpected end of flow-mode */
+export declare const FLOW_END = "\u0018";
+/** Next token is a scalar value */
+export declare const SCALAR = "\u001F";
+/** @returns `true` if `token` is a flow or block collection */
+export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection;
+/** @returns `true` if `token` is a flow or block scalar; not an alias */
+export declare const isScalar: (token: Token | null | undefined) => token is BlockScalar | FlowScalar;
+/** Get a printable representation of a lexer token */
+export declare function prettyToken(token: string): string;
+/** Identify the type of a lexer token. May return `null` for unknown tokens. */
+export declare function tokenType(source: string): TokenType | null;
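+
+// Illustrative sketch (not part of these declarations): isScalar() and
+// isCollection() act as type guards that narrow the Token union, e.g.
+//
+//   import { Token, isCollection, isScalar } from './cst.js'
+//   declare const token: Token
+//   if (isScalar(token)) token.source         // BlockScalar | FlowScalar
+//   else if (isCollection(token)) token.items // BlockMap | BlockSequence | FlowCollection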
diff --git a/node_modules/yaml/dist/parse/cst.js b/node_modules/yaml/dist/parse/cst.js
new file mode 100644
index 0000000..613c229
--- /dev/null
+++ b/node_modules/yaml/dist/parse/cst.js
@@ -0,0 +1,112 @@
+'use strict';
+
+var cstScalar = require('./cst-scalar.js');
+var cstStringify = require('./cst-stringify.js');
+var cstVisit = require('./cst-visit.js');
+
+/** The byte order mark */
+const BOM = '\u{FEFF}';
+/** Start of doc-mode */
+const DOCUMENT = '\x02'; // C0: Start of Text
+/** Unexpected end of flow-mode */
+const FLOW_END = '\x18'; // C0: Cancel
+/** Next token is a scalar value */
+const SCALAR = '\x1f'; // C0: Unit Separator
+/** @returns `true` if `token` is a flow or block collection */
+const isCollection = (token) => !!token && 'items' in token;
+/** @returns `true` if `token` is a flow or block scalar; not an alias */
+const isScalar = (token) => !!token &&
+ (token.type === 'scalar' ||
+ token.type === 'single-quoted-scalar' ||
+ token.type === 'double-quoted-scalar' ||
+ token.type === 'block-scalar');
+/* istanbul ignore next */
+/** Get a printable representation of a lexer token */
+function prettyToken(token) {
+ switch (token) {
+ case BOM:
+ return '<BOM>';
+ case DOCUMENT:
+ return '<DOC>';
+ case FLOW_END:
+ return '<FLOW_END>';
+ case SCALAR:
+ return '<SCALAR>';
+ default:
+ return JSON.stringify(token);
+ }
+}
+/** Identify the type of a lexer token. May return `null` for unknown tokens. */
+function tokenType(source) {
+ switch (source) {
+ case BOM:
+ return 'byte-order-mark';
+ case DOCUMENT:
+ return 'doc-mode';
+ case FLOW_END:
+ return 'flow-error-end';
+ case SCALAR:
+ return 'scalar';
+ case '---':
+ return 'doc-start';
+ case '...':
+ return 'doc-end';
+ case '':
+ case '\n':
+ case '\r\n':
+ return 'newline';
+ case '-':
+ return 'seq-item-ind';
+ case '?':
+ return 'explicit-key-ind';
+ case ':':
+ return 'map-value-ind';
+ case '{':
+ return 'flow-map-start';
+ case '}':
+ return 'flow-map-end';
+ case '[':
+ return 'flow-seq-start';
+ case ']':
+ return 'flow-seq-end';
+ case ',':
+ return 'comma';
+ }
+ switch (source[0]) {
+ case ' ':
+ case '\t':
+ return 'space';
+ case '#':
+ return 'comment';
+ case '%':
+ return 'directive-line';
+ case '*':
+ return 'alias';
+ case '&':
+ return 'anchor';
+ case '!':
+ return 'tag';
+ case "'":
+ return 'single-quoted-scalar';
+ case '"':
+ return 'double-quoted-scalar';
+ case '|':
+ case '>':
+ return 'block-scalar-header';
+ }
+ return null;
+}
+
+exports.createScalarToken = cstScalar.createScalarToken;
+exports.resolveAsScalar = cstScalar.resolveAsScalar;
+exports.setScalarValue = cstScalar.setScalarValue;
+exports.stringify = cstStringify.stringify;
+exports.visit = cstVisit.visit;
+exports.BOM = BOM;
+exports.DOCUMENT = DOCUMENT;
+exports.FLOW_END = FLOW_END;
+exports.SCALAR = SCALAR;
+exports.isCollection = isCollection;
+exports.isScalar = isScalar;
+exports.prettyToken = prettyToken;
+exports.tokenType = tokenType;
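+
+// Illustrative sketch (not part of the library source): classify raw lexer
+// output with tokenType() and prettyToken(), e.g.
+//
+//   const { Lexer } = require('./lexer.js');
+//   for (const lexeme of new Lexer().lex('key: value\n'))
+//     console.log(prettyToken(lexeme), tokenType(lexeme));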
diff --git a/node_modules/yaml/dist/parse/lexer.d.ts b/node_modules/yaml/dist/parse/lexer.d.ts
new file mode 100644
index 0000000..238e7b5
--- /dev/null
+++ b/node_modules/yaml/dist/parse/lexer.d.ts
@@ -0,0 +1,87 @@
+/**
+ * Splits an input string into lexical tokens, i.e. smaller strings that are
+ * easily identifiable by `tokens.tokenType()`.
+ *
+ * Lexing always starts in a "stream" context. Incomplete input may be buffered
+ * until a complete token can be emitted.
+ *
+ * In addition to slices of the original input, the following control characters
+ * may also be emitted:
+ *
+ * - `\x02` (Start of Text): A document starts with the next token
+ * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
+ * - `\x1f` (Unit Separator): Next token is a scalar value
+ * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
+ */
+export declare class Lexer {
+ /**
+ * Flag indicating whether the end of the current buffer marks the end of
+ * all input
+ */
+ private atEnd;
+ /**
+ * Explicit indent set in block scalar header, as an offset from the current
+ * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
+ * explicitly set.
+ */
+ private blockScalarIndent;
+ /**
+ * Block scalars that include a + (keep) chomping indicator in their header
+ * include trailing empty lines, which are otherwise excluded from the
+ * scalar's contents.
+ */
+ private blockScalarKeep;
+ /** Current input */
+ private buffer;
+ /**
+ * Flag noting whether the map value indicator : can immediately follow this
+ * node within a flow context.
+ */
+ private flowKey;
+ /** Count of surrounding flow collection levels. */
+ private flowLevel;
+ /**
+ * Minimum level of indentation required for next lines to be parsed as a
+ * part of the current scalar value.
+ */
+ private indentNext;
+ /** Indentation level of the current line. */
+ private indentValue;
+ /** Position of the next \n character. */
+ private lineEndPos;
+ /** Stores the state of the lexer when it reaches the end of incomplete input */
+ private next;
+ /** A pointer to `buffer`; the current position of the lexer. */
+ private pos;
+ /**
+ * Generate YAML tokens from the `source` string. If `incomplete`,
+ * a part of the last line may be left as a buffer for the next call.
+ *
+ * @returns A generator of lexical tokens
+ */
+ lex(source: string, incomplete?: boolean): Generator<string, void, unknown>;
+ private atLineEnd;
+ private charAt;
+ private continueScalar;
+ private getLine;
+ private hasChars;
+ private setNext;
+ private peek;
+ private parseNext;
+ private parseStream;
+ private parseLineStart;
+ private parseBlockStart;
+ private parseDocument;
+ private parseFlowCollection;
+ private parseQuotedScalar;
+ private parseBlockScalarHeader;
+ private parseBlockScalar;
+ private parsePlainScalar;
+ private pushCount;
+ private pushToIndex;
+ private pushIndicators;
+ private pushTag;
+ private pushNewline;
+ private pushSpaces;
+ private pushUntil;
+}
diff --git a/node_modules/yaml/dist/parse/lexer.js b/node_modules/yaml/dist/parse/lexer.js
new file mode 100644
index 0000000..855c11f
--- /dev/null
+++ b/node_modules/yaml/dist/parse/lexer.js
@@ -0,0 +1,704 @@
+'use strict';
+
+var cst = require('./cst.js');
+
+/*
+START -> stream
+
+stream
+ directive -> line-end -> stream
+ indent + line-end -> stream
+ [else] -> line-start
+
+line-end
+ comment -> line-end
+ newline -> .
+ input-end -> END
+
+line-start
+ doc-start -> doc
+ doc-end -> stream
+ [else] -> indent -> block-start
+
+block-start
+ seq-item-start -> block-start
+ explicit-key-start -> block-start
+ map-value-start -> block-start
+ [else] -> doc
+
+doc
+ line-end -> line-start
+ spaces -> doc
+ anchor -> doc
+ tag -> doc
+ flow-start -> flow -> doc
+ flow-end -> error -> doc
+ seq-item-start -> error -> doc
+ explicit-key-start -> error -> doc
+ map-value-start -> doc
+ alias -> doc
+ quote-start -> quoted-scalar -> doc
+ block-scalar-header -> line-end -> block-scalar(min) -> line-start
+ [else] -> plain-scalar(false, min) -> doc
+
+flow
+ line-end -> flow
+ spaces -> flow
+ anchor -> flow
+ tag -> flow
+ flow-start -> flow -> flow
+ flow-end -> .
+ seq-item-start -> error -> flow
+ explicit-key-start -> flow
+ map-value-start -> flow
+ alias -> flow
+ quote-start -> quoted-scalar -> flow
+ comma -> flow
+ [else] -> plain-scalar(true, 0) -> flow
+
+quoted-scalar
+ quote-end -> .
+ [else] -> quoted-scalar
+
+block-scalar(min)
+ newline + peek(indent < min) -> .
+ [else] -> block-scalar(min)
+
+plain-scalar(is-flow, min)
+ scalar-end(is-flow) -> .
+ peek(newline + (indent < min)) -> .
+ [else] -> plain-scalar(min)
+*/
+function isEmpty(ch) {
+ switch (ch) {
+ case undefined:
+ case ' ':
+ case '\n':
+ case '\r':
+ case '\t':
+ return true;
+ default:
+ return false;
+ }
+}
+const hexDigits = '0123456789ABCDEFabcdef'.split('');
+const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
+const invalidFlowScalarChars = ',[]{}'.split('');
+const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
+const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
+/**
+ * Splits an input string into lexical tokens, i.e. smaller strings that are
+ * easily identifiable by `tokens.tokenType()`.
+ *
+ * Lexing always starts in a "stream" context. Incomplete input may be buffered
+ * until a complete token can be emitted.
+ *
+ * In addition to slices of the original input, the following control characters
+ * may also be emitted:
+ *
+ * - `\x02` (Start of Text): A document starts with the next token
+ * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
+ * - `\x1f` (Unit Separator): Next token is a scalar value
+ * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
+ */
+class Lexer {
+ constructor() {
+ /**
+ * Flag indicating whether the end of the current buffer marks the end of
+ * all input
+ */
+ this.atEnd = false;
+ /**
+ * Explicit indent set in block scalar header, as an offset from the current
+ * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
+ * explicitly set.
+ */
+ this.blockScalarIndent = -1;
+ /**
+ * Block scalars that include a + (keep) chomping indicator in their header
+ * include trailing empty lines, which are otherwise excluded from the
+ * scalar's contents.
+ */
+ this.blockScalarKeep = false;
+ /** Current input */
+ this.buffer = '';
+ /**
+ * Flag noting whether the map value indicator : can immediately follow this
+ * node within a flow context.
+ */
+ this.flowKey = false;
+ /** Count of surrounding flow collection levels. */
+ this.flowLevel = 0;
+ /**
+ * Minimum level of indentation required for next lines to be parsed as a
+ * part of the current scalar value.
+ */
+ this.indentNext = 0;
+ /** Indentation level of the current line. */
+ this.indentValue = 0;
+ /** Position of the next \n character. */
+ this.lineEndPos = null;
+ /** Stores the state of the lexer when it reaches the end of incomplete input */
+ this.next = null;
+ /** A pointer to `buffer`; the current position of the lexer. */
+ this.pos = 0;
+ }
+ /**
+ * Generate YAML tokens from the `source` string. If `incomplete`,
+ * a part of the last line may be left as a buffer for the next call.
+ *
+ * @returns A generator of lexical tokens
+ */
+ *lex(source, incomplete = false) {
+ var _a;
+ if (source) {
+ this.buffer = this.buffer ? this.buffer + source : source;
+ this.lineEndPos = null;
+ }
+ this.atEnd = !incomplete;
+ let next = (_a = this.next) !== null && _a !== void 0 ? _a : 'stream';
+ while (next && (incomplete || this.hasChars(1)))
+ next = yield* this.parseNext(next);
+ }
+ atLineEnd() {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[++i];
+ if (!ch || ch === '#' || ch === '\n')
+ return true;
+ if (ch === '\r')
+ return this.buffer[i + 1] === '\n';
+ return false;
+ }
+ charAt(n) {
+ return this.buffer[this.pos + n];
+ }
+ continueScalar(offset) {
+ let ch = this.buffer[offset];
+ if (this.indentNext > 0) {
+ let indent = 0;
+ while (ch === ' ')
+ ch = this.buffer[++indent + offset];
+ if (ch === '\r') {
+ const next = this.buffer[indent + offset + 1];
+ if (next === '\n' || (!next && !this.atEnd))
+ return offset + indent + 1;
+ }
+ return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
+ ? offset + indent
+ : -1;
+ }
+ if (ch === '-' || ch === '.') {
+ const dt = this.buffer.substr(offset, 3);
+ if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
+ return -1;
+ }
+ return offset;
+ }
+ getLine() {
+ let end = this.lineEndPos;
+ if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
+ end = this.buffer.indexOf('\n', this.pos);
+ this.lineEndPos = end;
+ }
+ if (end === -1)
+ return this.atEnd ? this.buffer.substring(this.pos) : null;
+ if (this.buffer[end - 1] === '\r')
+ end -= 1;
+ return this.buffer.substring(this.pos, end);
+ }
+ hasChars(n) {
+ return this.pos + n <= this.buffer.length;
+ }
+ setNext(state) {
+ this.buffer = this.buffer.substring(this.pos);
+ this.pos = 0;
+ this.lineEndPos = null;
+ this.next = state;
+ return null;
+ }
+ peek(n) {
+ return this.buffer.substr(this.pos, n);
+ }
+ *parseNext(next) {
+ switch (next) {
+ case 'stream':
+ return yield* this.parseStream();
+ case 'line-start':
+ return yield* this.parseLineStart();
+ case 'block-start':
+ return yield* this.parseBlockStart();
+ case 'doc':
+ return yield* this.parseDocument();
+ case 'flow':
+ return yield* this.parseFlowCollection();
+ case 'quoted-scalar':
+ return yield* this.parseQuotedScalar();
+ case 'block-scalar':
+ return yield* this.parseBlockScalar();
+ case 'plain-scalar':
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseStream() {
+ let line = this.getLine();
+ if (line === null)
+ return this.setNext('stream');
+ if (line[0] === cst.BOM) {
+ yield* this.pushCount(1);
+ line = line.substring(1);
+ }
+ if (line[0] === '%') {
+ let dirEnd = line.length;
+ const cs = line.indexOf('#');
+ if (cs !== -1) {
+ const ch = line[cs - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd = cs - 1;
+ }
+ while (true) {
+ const ch = line[dirEnd - 1];
+ if (ch === ' ' || ch === '\t')
+ dirEnd -= 1;
+ else
+ break;
+ }
+ const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
+ yield* this.pushCount(line.length - n); // possible comment
+ this.pushNewline();
+ return 'stream';
+ }
+ if (this.atLineEnd()) {
+ const sp = yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - sp);
+ yield* this.pushNewline();
+ return 'stream';
+ }
+ yield cst.DOCUMENT;
+ return yield* this.parseLineStart();
+ }
+ *parseLineStart() {
+ const ch = this.charAt(0);
+ if (!ch && !this.atEnd)
+ return this.setNext('line-start');
+ if (ch === '-' || ch === '.') {
+ if (!this.atEnd && !this.hasChars(4))
+ return this.setNext('line-start');
+ const s = this.peek(3);
+ if (s === '---' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ this.indentValue = 0;
+ this.indentNext = 0;
+ return 'doc';
+ }
+ else if (s === '...' && isEmpty(this.charAt(3))) {
+ yield* this.pushCount(3);
+ return 'stream';
+ }
+ }
+ this.indentValue = yield* this.pushSpaces(false);
+ if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
+ this.indentNext = this.indentValue;
+ return yield* this.parseBlockStart();
+ }
+ *parseBlockStart() {
+ const [ch0, ch1] = this.peek(2);
+ if (!ch1 && !this.atEnd)
+ return this.setNext('block-start');
+ if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
+ const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
+ this.indentNext = this.indentValue + 1;
+ this.indentValue += n;
+ return yield* this.parseBlockStart();
+ }
+ return 'doc';
+ }
+ *parseDocument() {
+ yield* this.pushSpaces(true);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('doc');
+ let n = yield* this.pushIndicators();
+ switch (line[n]) {
+ case '#':
+ yield* this.pushCount(line.length - n);
+ // fallthrough
+ case undefined:
+ yield* this.pushNewline();
+ return yield* this.parseLineStart();
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel = 1;
+ return 'flow';
+ case '}':
+ case ']':
+ // this is an error
+ yield* this.pushCount(1);
+ return 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'doc';
+ case '"':
+ case "'":
+ return yield* this.parseQuotedScalar();
+ case '|':
+ case '>':
+ n += yield* this.parseBlockScalarHeader();
+ n += yield* this.pushSpaces(true);
+ yield* this.pushCount(line.length - n);
+ yield* this.pushNewline();
+ return yield* this.parseBlockScalar();
+ default:
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseFlowCollection() {
+ let nl, sp;
+ let indent = -1;
+ do {
+ nl = yield* this.pushNewline();
+ if (nl > 0) {
+ sp = yield* this.pushSpaces(false);
+ this.indentValue = indent = sp;
+ }
+ else {
+ sp = 0;
+ }
+ sp += yield* this.pushSpaces(true);
+ } while (nl + sp > 0);
+ const line = this.getLine();
+ if (line === null)
+ return this.setNext('flow');
+ if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
+ (indent === 0 &&
+ (line.startsWith('---') || line.startsWith('...')) &&
+ isEmpty(line[3]))) {
+ // Allowing for the terminal ] or } at the same (rather than greater)
+ // indent level as the initial [ or { is technically invalid, but
+ // failing here would be surprising to users.
+ const atFlowEndMarker = indent === this.indentNext - 1 &&
+ this.flowLevel === 1 &&
+ (line[0] === ']' || line[0] === '}');
+ if (!atFlowEndMarker) {
+ // this is an error
+ this.flowLevel = 0;
+ yield cst.FLOW_END;
+ return yield* this.parseLineStart();
+ }
+ }
+ let n = 0;
+ while (line[n] === ',') {
+ n += yield* this.pushCount(1);
+ n += yield* this.pushSpaces(true);
+ this.flowKey = false;
+ }
+ n += yield* this.pushIndicators();
+ switch (line[n]) {
+ case undefined:
+ return 'flow';
+ case '#':
+ yield* this.pushCount(line.length - n);
+ return 'flow';
+ case '{':
+ case '[':
+ yield* this.pushCount(1);
+ this.flowKey = false;
+ this.flowLevel += 1;
+ return 'flow';
+ case '}':
+ case ']':
+ yield* this.pushCount(1);
+ this.flowKey = true;
+ this.flowLevel -= 1;
+ return this.flowLevel ? 'flow' : 'doc';
+ case '*':
+ yield* this.pushUntil(isNotAnchorChar);
+ return 'flow';
+ case '"':
+ case "'":
+ this.flowKey = true;
+ return yield* this.parseQuotedScalar();
+ case ':': {
+ const next = this.charAt(1);
+ if (this.flowKey || isEmpty(next) || next === ',') {
+ this.flowKey = false;
+ yield* this.pushCount(1);
+ yield* this.pushSpaces(true);
+ return 'flow';
+ }
+ }
+ // fallthrough
+ default:
+ this.flowKey = false;
+ return yield* this.parsePlainScalar();
+ }
+ }
+ *parseQuotedScalar() {
+ const quote = this.charAt(0);
+ let end = this.buffer.indexOf(quote, this.pos + 1);
+ if (quote === "'") {
+ while (end !== -1 && this.buffer[end + 1] === "'")
+ end = this.buffer.indexOf("'", end + 2);
+ }
+ else {
+ // double-quote
+ while (end !== -1) {
+ let n = 0;
+ while (this.buffer[end - 1 - n] === '\\')
+ n += 1;
+ if (n % 2 === 0)
+ break;
+ end = this.buffer.indexOf('"', end + 1);
+ }
+ }
+ // Only looking for newlines within the quotes
+ const qb = this.buffer.substring(0, end);
+ let nl = qb.indexOf('\n', this.pos);
+ if (nl !== -1) {
+ while (nl !== -1) {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = qb.indexOf('\n', cs);
+ }
+ if (nl !== -1) {
+ // this is an error caused by an unexpected unindent
+ end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
+ }
+ }
+ if (end === -1) {
+ if (!this.atEnd)
+ return this.setNext('quoted-scalar');
+ end = this.buffer.length;
+ }
+ yield* this.pushToIndex(end + 1, false);
+ return this.flowLevel ? 'flow' : 'doc';
+ }
+ *parseBlockScalarHeader() {
+ this.blockScalarIndent = -1;
+ this.blockScalarKeep = false;
+ let i = this.pos;
+ while (true) {
+ const ch = this.buffer[++i];
+ if (ch === '+')
+ this.blockScalarKeep = true;
+ else if (ch > '0' && ch <= '9')
+ this.blockScalarIndent = Number(ch) - 1;
+ else if (ch !== '-')
+ break;
+ }
+ return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
+ }
+ *parseBlockScalar() {
+ let nl = this.pos - 1; // may be -1 if this.pos === 0
+ let indent = 0;
+ let ch;
+ loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
+ switch (ch) {
+ case ' ':
+ indent += 1;
+ break;
+ case '\n':
+ nl = i;
+ indent = 0;
+ break;
+ case '\r': {
+ const next = this.buffer[i + 1];
+ if (!next && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (next === '\n')
+ break;
+ } // fallthrough
+ default:
+ break loop;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('block-scalar');
+ if (indent >= this.indentNext) {
+ if (this.blockScalarIndent === -1)
+ this.indentNext = indent;
+ else
+ this.indentNext += this.blockScalarIndent;
+ do {
+ const cs = this.continueScalar(nl + 1);
+ if (cs === -1)
+ break;
+ nl = this.buffer.indexOf('\n', cs);
+ } while (nl !== -1);
+ if (nl === -1) {
+ if (!this.atEnd)
+ return this.setNext('block-scalar');
+ nl = this.buffer.length;
+ }
+ }
+ if (!this.blockScalarKeep) {
+ do {
+ let i = nl - 1;
+ let ch = this.buffer[i];
+ if (ch === '\r')
+ ch = this.buffer[--i];
+ const lastChar = i; // Drop the line if last char not more indented
+ while (ch === ' ' || ch === '\t')
+ ch = this.buffer[--i];
+ if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
+ nl = i;
+ else
+ break;
+ } while (true);
+ }
+ yield cst.SCALAR;
+ yield* this.pushToIndex(nl + 1, true);
+ return yield* this.parseLineStart();
+ }
+ *parsePlainScalar() {
+ const inFlow = this.flowLevel > 0;
+ let end = this.pos - 1;
+ let i = this.pos - 1;
+ let ch;
+ while ((ch = this.buffer[++i])) {
+ if (ch === ':') {
+ const next = this.buffer[i + 1];
+ if (isEmpty(next) || (inFlow && next === ','))
+ break;
+ end = i;
+ }
+ else if (isEmpty(ch)) {
+ let next = this.buffer[i + 1];
+ if (ch === '\r') {
+ if (next === '\n') {
+ i += 1;
+ ch = '\n';
+ next = this.buffer[i + 1];
+ }
+ else
+ end = i;
+ }
+ if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
+ break;
+ if (ch === '\n') {
+ const cs = this.continueScalar(i + 1);
+ if (cs === -1)
+ break;
+ i = Math.max(i, cs - 2); // to advance, but still account for ' #'
+ }
+ }
+ else {
+ if (inFlow && invalidFlowScalarChars.includes(ch))
+ break;
+ end = i;
+ }
+ }
+ if (!ch && !this.atEnd)
+ return this.setNext('plain-scalar');
+ yield cst.SCALAR;
+ yield* this.pushToIndex(end + 1, true);
+ return inFlow ? 'flow' : 'doc';
+ }
+ *pushCount(n) {
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos += n;
+ return n;
+ }
+ return 0;
+ }
+ *pushToIndex(i, allowEmpty) {
+ const s = this.buffer.slice(this.pos, i);
+ if (s) {
+ yield s;
+ this.pos += s.length;
+ return s.length;
+ }
+ else if (allowEmpty)
+ yield '';
+ return 0;
+ }
+ *pushIndicators() {
+ switch (this.charAt(0)) {
+ case '!':
+ return ((yield* this.pushTag()) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '&':
+ return ((yield* this.pushUntil(isNotAnchorChar)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ case '-': // this is an error
+ case '?': // this is an error outside flow collections
+ case ':': {
+ const inFlow = this.flowLevel > 0;
+ const ch1 = this.charAt(1);
+ if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
+ if (!inFlow)
+ this.indentNext = this.indentValue + 1;
+ else if (this.flowKey)
+ this.flowKey = false;
+ return ((yield* this.pushCount(1)) +
+ (yield* this.pushSpaces(true)) +
+ (yield* this.pushIndicators()));
+ }
+ }
+ }
+ return 0;
+ }
+ *pushTag() {
+ if (this.charAt(1) === '<') {
+ let i = this.pos + 2;
+ let ch = this.buffer[i];
+ while (!isEmpty(ch) && ch !== '>')
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
+ }
+ else {
+ let i = this.pos + 1;
+ let ch = this.buffer[i];
+ while (ch) {
+ if (tagChars.includes(ch))
+ ch = this.buffer[++i];
+ else if (ch === '%' &&
+ hexDigits.includes(this.buffer[i + 1]) &&
+ hexDigits.includes(this.buffer[i + 2])) {
+ ch = this.buffer[(i += 3)];
+ }
+ else
+ break;
+ }
+ return yield* this.pushToIndex(i, false);
+ }
+ }
+ *pushNewline() {
+ const ch = this.buffer[this.pos];
+ if (ch === '\n')
+ return yield* this.pushCount(1);
+ else if (ch === '\r' && this.charAt(1) === '\n')
+ return yield* this.pushCount(2);
+ else
+ return 0;
+ }
+ *pushSpaces(allowTabs) {
+ let i = this.pos - 1;
+ let ch;
+ do {
+ ch = this.buffer[++i];
+ } while (ch === ' ' || (allowTabs && ch === '\t'));
+ const n = i - this.pos;
+ if (n > 0) {
+ yield this.buffer.substr(this.pos, n);
+ this.pos = i;
+ }
+ return n;
+ }
+ *pushUntil(test) {
+ let i = this.pos;
+ let ch = this.buffer[i];
+ while (!test(ch))
+ ch = this.buffer[++i];
+ return yield* this.pushToIndex(i, false);
+ }
+}
+
+exports.Lexer = Lexer;
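+
+// Illustrative sketch (not part of the library source): the `incomplete` flag
+// allows feeding input in chunks; an unfinished trailing line is buffered
+// until the next call, e.g.
+//
+//   const { prettyToken } = require('./cst.js');
+//   const lexer = new Lexer();
+//   const out = [];
+//   for (const tok of lexer.lex('foo: b', true)) out.push(tok); // partial line is buffered
+//   for (const tok of lexer.lex('ar\n')) out.push(tok);         // rest of the input
+//   console.log(out.map(prettyToken).join(' '));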
diff --git a/node_modules/yaml/dist/parse/line-counter.d.ts b/node_modules/yaml/dist/parse/line-counter.d.ts
new file mode 100644
index 0000000..b469095
--- /dev/null
+++ b/node_modules/yaml/dist/parse/line-counter.d.ts
@@ -0,0 +1,22 @@
+/**
+ * Tracks newlines during parsing in order to provide an efficient API for
+ * determining the one-indexed `{ line, col }` position for any offset
+ * within the input.
+ */
+export declare class LineCounter {
+ lineStarts: number[];
+ /**
+ * Should be called in ascending order. Otherwise, call
+ * `lineCounter.lineStarts.sort()` before calling `linePos()`.
+ */
+ addNewLine: (offset: number) => number;
+ /**
+ * Performs a binary search and returns the 1-indexed { line, col }
+ * position of `offset`. If `line === 0`, `addNewLine` has never been
+ * called or `offset` is before the first known newline.
+ */
+ linePos: (offset: number) => {
+ line: number;
+ col: number;
+ };
+}
diff --git a/node_modules/yaml/dist/parse/line-counter.js b/node_modules/yaml/dist/parse/line-counter.js
new file mode 100644
index 0000000..0e7383b
--- /dev/null
+++ b/node_modules/yaml/dist/parse/line-counter.js
@@ -0,0 +1,41 @@
+'use strict';
+
+/**
+ * Tracks newlines during parsing in order to provide an efficient API for
+ * determining the one-indexed `{ line, col }` position for any offset
+ * within the input.
+ */
+class LineCounter {
+ constructor() {
+ this.lineStarts = [];
+ /**
+ * Should be called in ascending order. Otherwise, call
+ * `lineCounter.lineStarts.sort()` before calling `linePos()`.
+ */
+ this.addNewLine = (offset) => this.lineStarts.push(offset);
+ /**
+ * Performs a binary search and returns the 1-indexed { line, col }
+ * position of `offset`. If `line === 0`, `addNewLine` has never been
+ * called or `offset` is before the first known newline.
+ */
+ this.linePos = (offset) => {
+ let low = 0;
+ let high = this.lineStarts.length;
+ while (low < high) {
+ const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
+ if (this.lineStarts[mid] < offset)
+ low = mid + 1;
+ else
+ high = mid;
+ }
+ if (this.lineStarts[low] === offset)
+ return { line: low + 1, col: 1 };
+ if (low === 0)
+ return { line: 0, col: offset };
+ const start = this.lineStarts[low - 1];
+ return { line: low, col: offset - start + 1 };
+ };
+ }
+}
+
+exports.LineCounter = LineCounter;
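+
+// Illustrative sketch (not part of the library source): record the start
+// offset of each line (including 0 for the start of the input, as the Parser
+// does via its onNewLine callback), then map offsets back to positions, e.g.
+//
+//   const lc = new LineCounter();
+//   lc.addNewLine(0);           // start of input
+//   lc.addNewLine(5);           // offset just after the first '\n' in 'a: 1\nbb: 2\n'
+//   console.log(lc.linePos(6)); // { line: 2, col: 2 }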
diff --git a/node_modules/yaml/dist/parse/parser.d.ts b/node_modules/yaml/dist/parse/parser.d.ts
new file mode 100644
index 0000000..8f3159f
--- /dev/null
+++ b/node_modules/yaml/dist/parse/parser.d.ts
@@ -0,0 +1,84 @@
+import { Token } from './cst.js';
+/**
+ * A YAML concrete syntax tree (CST) parser
+ *
+ * ```ts
+ * const src: string = ...
+ * for (const token of new Parser().parse(src)) {
+ * // token: Token
+ * }
+ * ```
+ *
+ * To use the parser with a user-provided lexer:
+ *
+ * ```ts
+ * function* parse(source: string, lexer: Lexer) {
+ * const parser = new Parser()
+ * for (const lexeme of lexer.lex(source))
+ * yield* parser.next(lexeme)
+ * yield* parser.end()
+ * }
+ *
+ * const src: string = ...
+ * const lexer = new Lexer()
+ * for (const token of parse(src, lexer)) {
+ * // token: Token
+ * }
+ * ```
+ */
+export declare class Parser {
+ private onNewLine?;
+ /** If true, space and sequence indicators count as indentation */
+ private atNewLine;
+ /** If true, next token is a scalar value */
+ private atScalar;
+ /** Current indentation level */
+ private indent;
+ /** Current offset since the start of parsing */
+ offset: number;
+ /** On the same line with a block map key */
+ private onKeyLine;
+ /** Top indicates the node that's currently being built */
+ stack: Token[];
+ /** The source of the current token, set in parse() */
+ private source;
+ /** The type of the current token, set in parse() */
+ private type;
+ /**
+ * @param onNewLine - If defined, called separately with the start position of
+ * each new line (in `parse()`, including the start of input).
+ */
+ constructor(onNewLine?: (offset: number) => void);
+ /**
+ * Parse `source` as a YAML stream.
+ * If `incomplete`, a part of the last line may be left as a buffer for the next call.
+ *
+ * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
+ *
+ * @returns A generator of tokens representing each directive, document, and other structure.
+ */
+ parse(source: string, incomplete?: boolean): Generator<Token, void, unknown>;
+ /**
+ * Advance the parser by the `source` of one lexical token.
+ */
+ next(source: string): Generator<Token, void, unknown>;
+ private lexer;
+ /** Call at end of input to push out any remaining constructions */
+ end(): Generator<Token, void, unknown>;
+ private get sourceToken();
+ private step;
+ private peek;
+ private pop;
+ private stream;
+ private document;
+ private scalar;
+ private blockScalar;
+ private blockMap;
+ private blockSequence;
+ private flowCollection;
+ private flowScalar;
+ private startBlockValue;
+ private atIndentedComment;
+ private documentEnd;
+ private lineEnd;
+}
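+
+// Illustrative sketch (not part of these declarations): the onNewLine
+// callback pairs naturally with LineCounter from ./line-counter.js, mapping
+// token offsets back to { line, col } positions, e.g.
+//
+//   import { LineCounter } from './line-counter.js'
+//   import { Parser } from './parser.js'
+//   const lineCounter = new LineCounter()
+//   const parser = new Parser(lineCounter.addNewLine)
+//   for (const token of parser.parse('a: 1\nbb: 2\n'))
+//     console.log(token.type, lineCounter.linePos(token.offset))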
diff --git a/node_modules/yaml/dist/parse/parser.js b/node_modules/yaml/dist/parse/parser.js
new file mode 100644
index 0000000..0873092
--- /dev/null
+++ b/node_modules/yaml/dist/parse/parser.js
@@ -0,0 +1,958 @@
+'use strict';
+
+var cst = require('./cst.js');
+var lexer = require('./lexer.js');
+
+function includesToken(list, type) {
+ for (let i = 0; i < list.length; ++i)
+ if (list[i].type === type)
+ return true;
+ return false;
+}
+function findNonEmptyIndex(list) {
+ for (let i = 0; i < list.length; ++i) {
+ switch (list[i].type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ break;
+ default:
+ return i;
+ }
+ }
+ return -1;
+}
+function isFlowToken(token) {
+ switch (token === null || token === void 0 ? void 0 : token.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ case 'flow-collection':
+ return true;
+ default:
+ return false;
+ }
+}
+function getPrevProps(parent) {
+ var _a;
+ switch (parent.type) {
+ case 'document':
+ return parent.start;
+ case 'block-map': {
+ const it = parent.items[parent.items.length - 1];
+ return (_a = it.sep) !== null && _a !== void 0 ? _a : it.start;
+ }
+ case 'block-seq':
+ return parent.items[parent.items.length - 1].start;
+ /* istanbul ignore next should not happen */
+ default:
+ return [];
+ }
+}
+/** Note: May modify input array */
+function getFirstKeyStartProps(prev) {
+ var _a;
+ if (prev.length === 0)
+ return [];
+ let i = prev.length;
+ loop: while (--i >= 0) {
+ switch (prev[i].type) {
+ case 'doc-start':
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ case 'newline':
+ break loop;
+ }
+ }
+ while (((_a = prev[++i]) === null || _a === void 0 ? void 0 : _a.type) === 'space') {
+ /* loop */
+ }
+ return prev.splice(i, prev.length);
+}
+function fixFlowSeqItems(fc) {
+ if (fc.start.type === 'flow-seq-start') {
+ for (const it of fc.items) {
+ if (it.sep &&
+ !it.value &&
+ !includesToken(it.start, 'explicit-key-ind') &&
+ !includesToken(it.sep, 'map-value-ind')) {
+ if (it.key)
+ it.value = it.key;
+ delete it.key;
+ if (isFlowToken(it.value)) {
+ if (it.value.end)
+ Array.prototype.push.apply(it.value.end, it.sep);
+ else
+ it.value.end = it.sep;
+ }
+ else
+ Array.prototype.push.apply(it.start, it.sep);
+ delete it.sep;
+ }
+ }
+ }
+}
+/**
+ * A YAML concrete syntax tree (CST) parser
+ *
+ * ```ts
+ * const src: string = ...
+ * for (const token of new Parser().parse(src)) {
+ * // token: Token
+ * }
+ * ```
+ *
+ * To use the parser with a user-provided lexer:
+ *
+ * ```ts
+ * function* parse(source: string, lexer: Lexer) {
+ * const parser = new Parser()
+ * for (const lexeme of lexer.lex(source))
+ * yield* parser.next(lexeme)
+ * yield* parser.end()
+ * }
+ *
+ * const src: string = ...
+ * const lexer = new Lexer()
+ * for (const token of parse(src, lexer)) {
+ * // token: Token
+ * }
+ * ```
+ */
+class Parser {
+ /**
+ * @param onNewLine - If defined, called separately with the start position of
+ * each new line (in `parse()`, including the start of input).
+ */
+ constructor(onNewLine) {
+ /** If true, space and sequence indicators count as indentation */
+ this.atNewLine = true;
+ /** If true, next token is a scalar value */
+ this.atScalar = false;
+ /** Current indentation level */
+ this.indent = 0;
+ /** Current offset since the start of parsing */
+ this.offset = 0;
+ /** On the same line with a block map key */
+ this.onKeyLine = false;
+ /** Top indicates the node that's currently being built */
+ this.stack = [];
+ /** The source of the current token, set in parse() */
+ this.source = '';
+ /** The type of the current token, set in parse() */
+ this.type = '';
+ // Must be defined after `next()`
+ this.lexer = new lexer.Lexer();
+ this.onNewLine = onNewLine;
+ }
+ /**
+ * Parse `source` as a YAML stream.
+ * If `incomplete`, a part of the last line may be left as a buffer for the next call.
+ *
+ * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
+ *
+ * @returns A generator of tokens representing each directive, document, and other structure.
+ */
+ *parse(source, incomplete = false) {
+ if (this.onNewLine && this.offset === 0)
+ this.onNewLine(0);
+ for (const lexeme of this.lexer.lex(source, incomplete))
+ yield* this.next(lexeme);
+ if (!incomplete)
+ yield* this.end();
+ }
+ /**
+ * Advance the parser by the `source` of one lexical token.
+ */
+ *next(source) {
+ this.source = source;
+ if (process.env.LOG_TOKENS)
+ console.log('|', cst.prettyToken(source));
+ if (this.atScalar) {
+ this.atScalar = false;
+ yield* this.step();
+ this.offset += source.length;
+ return;
+ }
+ const type = cst.tokenType(source);
+ if (!type) {
+ const message = `Not a YAML token: ${source}`;
+ yield* this.pop({ type: 'error', offset: this.offset, message, source });
+ this.offset += source.length;
+ }
+ else if (type === 'scalar') {
+ this.atNewLine = false;
+ this.atScalar = true;
+ this.type = 'scalar';
+ }
+ else {
+ this.type = type;
+ yield* this.step();
+ switch (type) {
+ case 'newline':
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine)
+ this.onNewLine(this.offset + source.length);
+ break;
+ case 'space':
+ if (this.atNewLine && source[0] === ' ')
+ this.indent += source.length;
+ break;
+ case 'explicit-key-ind':
+ case 'map-value-ind':
+ case 'seq-item-ind':
+ if (this.atNewLine)
+ this.indent += source.length;
+ break;
+ case 'doc-mode':
+ case 'flow-error-end':
+ return;
+ default:
+ this.atNewLine = false;
+ }
+ this.offset += source.length;
+ }
+ }
+ /** Call at end of input to push out any remaining constructions */
+ *end() {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ }
+ get sourceToken() {
+ const st = {
+ type: this.type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ return st;
+ }
+ *step() {
+ const top = this.peek(1);
+ if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
+ while (this.stack.length > 0)
+ yield* this.pop();
+ this.stack.push({
+ type: 'doc-end',
+ offset: this.offset,
+ source: this.source
+ });
+ return;
+ }
+ if (!top)
+ return yield* this.stream();
+ switch (top.type) {
+ case 'document':
+ return yield* this.document(top);
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return yield* this.scalar(top);
+ case 'block-scalar':
+ return yield* this.blockScalar(top);
+ case 'block-map':
+ return yield* this.blockMap(top);
+ case 'block-seq':
+ return yield* this.blockSequence(top);
+ case 'flow-collection':
+ return yield* this.flowCollection(top);
+ case 'doc-end':
+ return yield* this.documentEnd(top);
+ }
+ /* istanbul ignore next should not happen */
+ yield* this.pop();
+ }
+ peek(n) {
+ return this.stack[this.stack.length - n];
+ }
+ *pop(error) {
+ const token = error !== null && error !== void 0 ? error : this.stack.pop();
+ /* istanbul ignore if should not happen */
+ if (!token) {
+ const message = 'Tried to pop an empty stack';
+ yield { type: 'error', offset: this.offset, source: '', message };
+ }
+ else if (this.stack.length === 0) {
+ yield token;
+ }
+ else {
+ const top = this.peek(1);
+ if (token.type === 'block-scalar') {
+ // Block scalars use their parent rather than header indent
+ token.indent = 'indent' in top ? top.indent : 0;
+ }
+ else if (token.type === 'flow-collection' && top.type === 'document') {
+ // Ignore all indent for top-level flow collections
+ token.indent = 0;
+ }
+ if (token.type === 'flow-collection')
+ fixFlowSeqItems(token);
+ switch (top.type) {
+ case 'document':
+ top.value = token;
+ break;
+ case 'block-scalar':
+ top.props.push(token); // error
+ break;
+ case 'block-map': {
+ const it = top.items[top.items.length - 1];
+ if (it.value) {
+ top.items.push({ start: [], key: token, sep: [] });
+ this.onKeyLine = true;
+ return;
+ }
+ else if (it.sep) {
+ it.value = token;
+ }
+ else {
+ Object.assign(it, { key: token, sep: [] });
+ this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
+ return;
+ }
+ break;
+ }
+ case 'block-seq': {
+ const it = top.items[top.items.length - 1];
+ if (it.value)
+ top.items.push({ start: [], value: token });
+ else
+ it.value = token;
+ break;
+ }
+ case 'flow-collection': {
+ const it = top.items[top.items.length - 1];
+ if (!it || it.value)
+ top.items.push({ start: [], key: token, sep: [] });
+ else if (it.sep)
+ it.value = token;
+ else
+ Object.assign(it, { key: token, sep: [] });
+ return;
+ }
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.pop(token);
+ }
+ if ((top.type === 'document' ||
+ top.type === 'block-map' ||
+ top.type === 'block-seq') &&
+ (token.type === 'block-map' || token.type === 'block-seq')) {
+ const last = token.items[token.items.length - 1];
+ if (last &&
+ !last.sep &&
+ !last.value &&
+ last.start.length > 0 &&
+ findNonEmptyIndex(last.start) === -1 &&
+ (token.indent === 0 ||
+ last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
+ if (top.type === 'document')
+ top.end = last.start;
+ else
+ top.items.push({ start: last.start });
+ token.items.splice(-1, 1);
+ }
+ }
+ }
+ }
+ *stream() {
+ switch (this.type) {
+ case 'directive-line':
+ yield { type: 'directive', offset: this.offset, source: this.source };
+ return;
+ case 'byte-order-mark':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ yield this.sourceToken;
+ return;
+ case 'doc-mode':
+ case 'doc-start': {
+ const doc = {
+ type: 'document',
+ offset: this.offset,
+ start: []
+ };
+ if (this.type === 'doc-start')
+ doc.start.push(this.sourceToken);
+ this.stack.push(doc);
+ return;
+ }
+ }
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML stream`,
+ source: this.source
+ };
+ }
+ *document(doc) {
+ if (doc.value)
+ return yield* this.lineEnd(doc);
+ switch (this.type) {
+ case 'doc-start': {
+ if (findNonEmptyIndex(doc.start) !== -1) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ case 'anchor':
+ case 'tag':
+ case 'space':
+ case 'comment':
+ case 'newline':
+ doc.start.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(doc);
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield {
+ type: 'error',
+ offset: this.offset,
+ message: `Unexpected ${this.type} token in YAML document`,
+ source: this.source
+ };
+ }
+ }
+ *scalar(scalar) {
+ if (this.type === 'map-value-ind') {
+ const prev = getPrevProps(this.peek(2));
+ const start = getFirstKeyStartProps(prev);
+ let sep;
+ if (scalar.end) {
+ sep = scalar.end;
+ sep.push(this.sourceToken);
+ delete scalar.end;
+ }
+ else
+ sep = [this.sourceToken];
+ const map = {
+ type: 'block-map',
+ offset: scalar.offset,
+ indent: scalar.indent,
+ items: [{ start, key: scalar, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else
+ yield* this.lineEnd(scalar);
+ }
+ *blockScalar(scalar) {
+ switch (this.type) {
+ case 'space':
+ case 'comment':
+ case 'newline':
+ scalar.props.push(this.sourceToken);
+ return;
+ case 'scalar':
+ scalar.source = this.source;
+ // block-scalar source includes trailing newline
+ this.atNewLine = true;
+ this.indent = 0;
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ yield* this.pop();
+ break;
+ /* istanbul ignore next should not happen */
+ default:
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ *blockMap(map) {
+ var _a;
+ const it = map.items[map.items.length - 1];
+ // it.sep is true-ish if pair already has key or : separator
+ switch (this.type) {
+ case 'newline':
+ this.onKeyLine = false;
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ map.items.push({ start: [this.sourceToken] });
+ }
+ else if (it.sep) {
+ it.sep.push(this.sourceToken);
+ }
+ else {
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value) {
+ map.items.push({ start: [this.sourceToken] });
+ }
+ else if (it.sep) {
+ it.sep.push(this.sourceToken);
+ }
+ else {
+ if (this.atIndentedComment(it.start, map.indent)) {
+ const prev = map.items[map.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ map.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ }
+ if (this.indent >= map.indent) {
+ const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
+ // For empty nodes, assign newline-separated not indented empty tokens to following node
+ let start = [];
+ if (atNextItem && it.sep && !it.value) {
+ const nl = [];
+ for (let i = 0; i < it.sep.length; ++i) {
+ const st = it.sep[i];
+ switch (st.type) {
+ case 'newline':
+ nl.push(i);
+ break;
+ case 'space':
+ break;
+ case 'comment':
+ if (st.indent > map.indent)
+ nl.length = 0;
+ break;
+ default:
+ nl.length = 0;
+ }
+ }
+ if (nl.length >= 2)
+ start = it.sep.splice(nl[1]);
+ }
+ switch (this.type) {
+ case 'anchor':
+ case 'tag':
+ if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ it.sep.push(this.sourceToken);
+ }
+ else {
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'explicit-key-ind':
+ if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
+ it.start.push(this.sourceToken);
+ }
+ else if (atNextItem || it.value) {
+ start.push(this.sourceToken);
+ map.items.push({ start });
+ }
+ else {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ });
+ }
+ this.onKeyLine = true;
+ return;
+ case 'map-value-ind':
+ if (includesToken(it.start, 'explicit-key-ind')) {
+ if (!it.sep) {
+ if (includesToken(it.start, 'newline')) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else {
+ const start = getFirstKeyStartProps(it.start);
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ }
+ else if (it.value) {
+ map.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else if (isFlowToken(it.key) &&
+ !includesToken(it.sep, 'newline')) {
+ const start = getFirstKeyStartProps(it.start);
+ const key = it.key;
+ const sep = it.sep;
+ sep.push(this.sourceToken);
+ // @ts-expect-error type guard is wrong here
+ delete it.key, delete it.sep;
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key, sep }]
+ });
+ }
+ else if (start.length > 0) {
+ // Not actually at next item
+ it.sep = it.sep.concat(start, this.sourceToken);
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ else {
+ if (!it.sep) {
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ }
+ else if (it.value || atNextItem) {
+ map.items.push({ start, key: null, sep: [this.sourceToken] });
+ }
+ else if (includesToken(it.sep, 'map-value-ind')) {
+ this.stack.push({
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [], key: null, sep: [this.sourceToken] }]
+ });
+ }
+ else {
+ it.sep.push(this.sourceToken);
+ }
+ }
+ this.onKeyLine = true;
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (atNextItem || it.value) {
+ map.items.push({ start, key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ else if (it.sep) {
+ this.stack.push(fs);
+ }
+ else {
+ Object.assign(it, { key: fs, sep: [] });
+ this.onKeyLine = true;
+ }
+ return;
+ }
+ default: {
+ const bv = this.startBlockValue(map);
+ if (bv) {
+ if (atNextItem &&
+ bv.type !== 'block-seq' &&
+ includesToken(it.start, 'explicit-key-ind')) {
+ map.items.push({ start });
+ }
+ this.stack.push(bv);
+ return;
+ }
+ }
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *blockSequence(seq) {
+ var _a;
+ const it = seq.items[seq.items.length - 1];
+ switch (this.type) {
+ case 'newline':
+ if (it.value) {
+ const end = 'end' in it.value ? it.value.end : undefined;
+ const last = Array.isArray(end) ? end[end.length - 1] : undefined;
+ if ((last === null || last === void 0 ? void 0 : last.type) === 'comment')
+ end === null || end === void 0 ? void 0 : end.push(this.sourceToken);
+ else
+ seq.items.push({ start: [this.sourceToken] });
+ }
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'space':
+ case 'comment':
+ if (it.value)
+ seq.items.push({ start: [this.sourceToken] });
+ else {
+ if (this.atIndentedComment(it.start, seq.indent)) {
+ const prev = seq.items[seq.items.length - 2];
+ const end = (_a = prev === null || prev === void 0 ? void 0 : prev.value) === null || _a === void 0 ? void 0 : _a.end;
+ if (Array.isArray(end)) {
+ Array.prototype.push.apply(end, it.start);
+ end.push(this.sourceToken);
+ seq.items.pop();
+ return;
+ }
+ }
+ it.start.push(this.sourceToken);
+ }
+ return;
+ case 'anchor':
+ case 'tag':
+ if (it.value || this.indent <= seq.indent)
+ break;
+ it.start.push(this.sourceToken);
+ return;
+ case 'seq-item-ind':
+ if (this.indent !== seq.indent)
+ break;
+ if (it.value || includesToken(it.start, 'seq-item-ind'))
+ seq.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ }
+ if (this.indent > seq.indent) {
+ const bv = this.startBlockValue(seq);
+ if (bv) {
+ this.stack.push(bv);
+ return;
+ }
+ }
+ yield* this.pop();
+ yield* this.step();
+ }
+ *flowCollection(fc) {
+ const it = fc.items[fc.items.length - 1];
+ if (this.type === 'flow-error-end') {
+ let top;
+ do {
+ yield* this.pop();
+ top = this.peek(1);
+ } while (top && top.type === 'flow-collection');
+ }
+ else if (fc.end.length === 0) {
+ switch (this.type) {
+ case 'comma':
+ case 'explicit-key-ind':
+ if (!it || it.sep)
+ fc.items.push({ start: [this.sourceToken] });
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'map-value-ind':
+ if (!it || it.value)
+ fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ Object.assign(it, { key: null, sep: [this.sourceToken] });
+ return;
+ case 'space':
+ case 'comment':
+ case 'newline':
+ case 'anchor':
+ case 'tag':
+ if (!it || it.value)
+ fc.items.push({ start: [this.sourceToken] });
+ else if (it.sep)
+ it.sep.push(this.sourceToken);
+ else
+ it.start.push(this.sourceToken);
+ return;
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar': {
+ const fs = this.flowScalar(this.type);
+ if (!it || it.value)
+ fc.items.push({ start: [], key: fs, sep: [] });
+ else if (it.sep)
+ this.stack.push(fs);
+ else
+ Object.assign(it, { key: fs, sep: [] });
+ return;
+ }
+ case 'flow-map-end':
+ case 'flow-seq-end':
+ fc.end.push(this.sourceToken);
+ return;
+ }
+ const bv = this.startBlockValue(fc);
+ /* istanbul ignore else should not happen */
+ if (bv)
+ this.stack.push(bv);
+ else {
+ yield* this.pop();
+ yield* this.step();
+ }
+ }
+ else {
+ const parent = this.peek(2);
+ if (parent.type === 'block-map' &&
+ ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
+ (this.type === 'newline' &&
+ !parent.items[parent.items.length - 1].sep))) {
+ yield* this.pop();
+ yield* this.step();
+ }
+ else if (this.type === 'map-value-ind' &&
+ parent.type !== 'flow-collection') {
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ fixFlowSeqItems(fc);
+ const sep = fc.end.splice(1, fc.end.length);
+ sep.push(this.sourceToken);
+ const map = {
+ type: 'block-map',
+ offset: fc.offset,
+ indent: fc.indent,
+ items: [{ start, key: fc, sep }]
+ };
+ this.onKeyLine = true;
+ this.stack[this.stack.length - 1] = map;
+ }
+ else {
+ yield* this.lineEnd(fc);
+ }
+ }
+ }
+ flowScalar(type) {
+ if (this.onNewLine) {
+ let nl = this.source.indexOf('\n') + 1;
+ while (nl !== 0) {
+ this.onNewLine(this.offset + nl);
+ nl = this.source.indexOf('\n', nl) + 1;
+ }
+ }
+ return {
+ type,
+ offset: this.offset,
+ indent: this.indent,
+ source: this.source
+ };
+ }
+ startBlockValue(parent) {
+ switch (this.type) {
+ case 'alias':
+ case 'scalar':
+ case 'single-quoted-scalar':
+ case 'double-quoted-scalar':
+ return this.flowScalar(this.type);
+ case 'block-scalar-header':
+ return {
+ type: 'block-scalar',
+ offset: this.offset,
+ indent: this.indent,
+ props: [this.sourceToken],
+ source: ''
+ };
+ case 'flow-map-start':
+ case 'flow-seq-start':
+ return {
+ type: 'flow-collection',
+ offset: this.offset,
+ indent: this.indent,
+ start: this.sourceToken,
+ items: [],
+ end: []
+ };
+ case 'seq-item-ind':
+ return {
+ type: 'block-seq',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start: [this.sourceToken] }]
+ };
+ case 'explicit-key-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ start.push(this.sourceToken);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start }]
+ };
+ }
+ case 'map-value-ind': {
+ this.onKeyLine = true;
+ const prev = getPrevProps(parent);
+ const start = getFirstKeyStartProps(prev);
+ return {
+ type: 'block-map',
+ offset: this.offset,
+ indent: this.indent,
+ items: [{ start, key: null, sep: [this.sourceToken] }]
+ };
+ }
+ }
+ return null;
+ }
+ atIndentedComment(start, indent) {
+ if (this.type !== 'comment')
+ return false;
+ if (this.indent <= indent)
+ return false;
+ return start.every(st => st.type === 'newline' || st.type === 'space');
+ }
+ *documentEnd(docEnd) {
+ if (this.type !== 'doc-mode') {
+ if (docEnd.end)
+ docEnd.end.push(this.sourceToken);
+ else
+ docEnd.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+ *lineEnd(token) {
+ switch (this.type) {
+ case 'comma':
+ case 'doc-start':
+ case 'doc-end':
+ case 'flow-seq-end':
+ case 'flow-map-end':
+ case 'map-value-ind':
+ yield* this.pop();
+ yield* this.step();
+ break;
+ case 'newline':
+ this.onKeyLine = false;
+ // fallthrough
+ case 'space':
+ case 'comment':
+ default:
+ // all other values are errors
+ if (token.end)
+ token.end.push(this.sourceToken);
+ else
+ token.end = [this.sourceToken];
+ if (this.type === 'newline')
+ yield* this.pop();
+ }
+ }
+}
+
+exports.Parser = Parser;
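
A minimal usage sketch for the CST parser above, assuming the package index re-exports `Parser` as the upstream `yaml` module does; the sample source string and the logging are illustrative only:

```js
// Stream CST tokens out of the Parser; offsets reported by the optional
// onNewLine callback include the start of input (offset 0).
const { Parser } = require('yaml');

const src = 'key: value\nlist:\n  - a\n  - b\n';
const parser = new Parser(offset => console.log('line starts at', offset));

for (const token of parser.parse(src)) {
  // Top-level tokens are e.g. 'document', 'comment', 'newline' or 'error'.
  console.log(token.type);
}
```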
diff --git a/node_modules/yaml/dist/public-api.d.ts b/node_modules/yaml/dist/public-api.d.ts
new file mode 100644
index 0000000..82a4d4f
--- /dev/null
+++ b/node_modules/yaml/dist/public-api.d.ts
@@ -0,0 +1,43 @@
+import { Composer } from './compose/composer.js';
+import type { Reviver } from './doc/applyReviver.js';
+import { Document, Replacer } from './doc/Document.js';
+import type { ParsedNode } from './nodes/Node.js';
+import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js';
+export interface EmptyStream extends Array<Document.Parsed>, ReturnType<Composer['streamInfo']> {
+ empty: true;
+}
+/**
+ * Parse the input as a stream of YAML documents.
+ *
+ * Documents should be separated from each other by `...` or `---` marker lines.
+ *
+ * @returns If an empty `docs` array is returned, it will be of type
+ * EmptyStream and contain additional stream information. In
+ * TypeScript, you should use `'empty' in docs` as a type guard for it.
+ */
+export declare function parseAllDocuments<T extends ParsedNode = ParsedNode>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Document.Parsed<T>[] | EmptyStream;
+/** Parse an input string into a single YAML.Document */
+export declare function parseDocument<T extends ParsedNode = ParsedNode>(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Document.Parsed<T>;
+/**
+ * Parse an input string into JavaScript.
+ *
+ * Only supports input consisting of a single YAML document; for multi-document
+ * support you should use `YAML.parseAllDocuments`. May throw on error, and may
+ * log warnings using `console.warn`.
+ *
+ * @param src - A string with YAML formatting.
+ * @param reviver - A reviver function, as in `JSON.parse()`
+ * @returns The value will match the type of the root value of the parsed YAML
+ * document, so Maps become objects, Sequences arrays, and scalars result in
+ * nulls, booleans, numbers and strings.
+ */
+export declare function parse(src: string, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
+export declare function parse(src: string, reviver: Reviver, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any;
+/**
+ * Stringify a value as a YAML document.
+ *
+ * @param replacer - A replacer array or function, as in `JSON.stringify()`
+ * @returns Will always include `\n` as the last character, as is expected of YAML documents.
+ */
+export declare function stringify(value: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions): string;
+export declare function stringify(value: any, replacer?: Replacer | null, options?: string | number | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions)): string;
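
A short sketch of the `'empty' in docs` type guard described above; the same check works from plain JavaScript, assuming the package index re-exports these functions:

```js
const { parseAllDocuments } = require('yaml');

const docs = parseAllDocuments('---\na: 1\n---\nb: 2\n');
if ('empty' in docs) {
  // EmptyStream: no documents, but stream-level info (errors, comments, ...) remains.
  console.log(docs.errors);
} else {
  for (const doc of docs) console.log(doc.toJS()); // { a: 1 }, then { b: 2 }
}
```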
diff --git a/node_modules/yaml/dist/public-api.js b/node_modules/yaml/dist/public-api.js
new file mode 100644
index 0000000..6f853a1
--- /dev/null
+++ b/node_modules/yaml/dist/public-api.js
@@ -0,0 +1,105 @@
+'use strict';
+
+var composer = require('./compose/composer.js');
+var Document = require('./doc/Document.js');
+var errors = require('./errors.js');
+var log = require('./log.js');
+var lineCounter = require('./parse/line-counter.js');
+var parser = require('./parse/parser.js');
+
+function parseOptions(options) {
+ const prettyErrors = options.prettyErrors !== false;
+ const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;
+ return { lineCounter: lineCounter$1, prettyErrors };
+}
+/**
+ * Parse the input as a stream of YAML documents.
+ *
+ * Documents should be separated from each other by `...` or `---` marker lines.
+ *
+ * @returns If an empty `docs` array is returned, it will be of type
+ * EmptyStream and contain additional stream information. In
+ * TypeScript, you should use `'empty' in docs` as a type guard for it.
+ */
+function parseAllDocuments(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser$1 = new parser.Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer$1 = new composer.Composer(options);
+ const docs = Array.from(composer$1.compose(parser$1.parse(source)));
+ if (prettyErrors && lineCounter)
+ for (const doc of docs) {
+ doc.errors.forEach(errors.prettifyError(source, lineCounter));
+ doc.warnings.forEach(errors.prettifyError(source, lineCounter));
+ }
+ if (docs.length > 0)
+ return docs;
+ return Object.assign([], { empty: true }, composer$1.streamInfo());
+}
+/** Parse an input string into a single YAML.Document */
+function parseDocument(source, options = {}) {
+ const { lineCounter, prettyErrors } = parseOptions(options);
+ const parser$1 = new parser.Parser(lineCounter === null || lineCounter === void 0 ? void 0 : lineCounter.addNewLine);
+ const composer$1 = new composer.Composer(options);
+ // `doc` is always set by compose.end(true) at the very latest
+ let doc = null;
+ for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {
+ if (!doc)
+ doc = _doc;
+ else if (doc.options.logLevel !== 'silent') {
+ doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
+ break;
+ }
+ }
+ if (prettyErrors && lineCounter) {
+ doc.errors.forEach(errors.prettifyError(source, lineCounter));
+ doc.warnings.forEach(errors.prettifyError(source, lineCounter));
+ }
+ return doc;
+}
+function parse(src, reviver, options) {
+ let _reviver = undefined;
+ if (typeof reviver === 'function') {
+ _reviver = reviver;
+ }
+ else if (options === undefined && reviver && typeof reviver === 'object') {
+ options = reviver;
+ }
+ const doc = parseDocument(src, options);
+ if (!doc)
+ return null;
+ doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));
+ if (doc.errors.length > 0) {
+ if (doc.options.logLevel !== 'silent')
+ throw doc.errors[0];
+ else
+ doc.errors = [];
+ }
+ return doc.toJS(Object.assign({ reviver: _reviver }, options));
+}
+function stringify(value, replacer, options) {
+ var _a;
+ let _replacer = null;
+ if (typeof replacer === 'function' || Array.isArray(replacer)) {
+ _replacer = replacer;
+ }
+ else if (options === undefined && replacer) {
+ options = replacer;
+ }
+ if (typeof options === 'string')
+ options = options.length;
+ if (typeof options === 'number') {
+ const indent = Math.round(options);
+ options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
+ }
+ if (value === undefined) {
+ const { keepUndefined } = (_a = options !== null && options !== void 0 ? options : replacer) !== null && _a !== void 0 ? _a : {};
+ if (!keepUndefined)
+ return undefined;
+ }
+ return new Document.Document(value, _replacer, options).toString(options);
+}
+
+exports.parse = parse;
+exports.parseAllDocuments = parseAllDocuments;
+exports.parseDocument = parseDocument;
+exports.stringify = stringify;
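
The overload handling above mirrors `JSON.parse` / `JSON.stringify`; a brief sketch of the resulting call patterns (outputs in comments are approximate):

```js
const { parse, stringify } = require('yaml');

// The reviver is applied via doc.toJS(), like JSON.parse's second argument.
parse('a: 1\nb: 2\n', (key, value) =>
  typeof value === 'number' ? value * 10 : value
); // { a: 10, b: 20 }

// A numeric (or string-length) third argument is treated as the indent width;
// values above 8 are clamped and values below 1 fall back to the default.
stringify({ list: ['x', 'y'] }, null, 4); // 'list:\n    - x\n    - y\n'
```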
diff --git a/node_modules/yaml/dist/schema/Schema.d.ts b/node_modules/yaml/dist/schema/Schema.d.ts
new file mode 100644
index 0000000..42c9c41
--- /dev/null
+++ b/node_modules/yaml/dist/schema/Schema.d.ts
@@ -0,0 +1,18 @@
+import { MAP, SCALAR, SEQ } from '../nodes/Node.js';
+import type { Pair } from '../nodes/Pair.js';
+import type { SchemaOptions, ToStringOptions } from '../options.js';
+import type { CollectionTag, ScalarTag } from './types.js';
+export declare class Schema {
+ compat: Array<CollectionTag | ScalarTag> | null;
+ knownTags: Record<string, CollectionTag | ScalarTag>;
+ merge: boolean;
+ name: string;
+ sortMapEntries: ((a: Pair, b: Pair) => number) | null;
+ tags: Array<CollectionTag | ScalarTag>;
+ toStringOptions: Readonly<ToStringOptions> | null;
+ [MAP]: CollectionTag;
+ [SCALAR]: ScalarTag;
+ [SEQ]: CollectionTag;
+ constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }: SchemaOptions);
+ clone(): Schema;
+}
diff --git a/node_modules/yaml/dist/schema/Schema.js b/node_modules/yaml/dist/schema/Schema.js
new file mode 100644
index 0000000..4c409fc
--- /dev/null
+++ b/node_modules/yaml/dist/schema/Schema.js
@@ -0,0 +1,40 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var map = require('./common/map.js');
+var seq = require('./common/seq.js');
+var string = require('./common/string.js');
+var tags = require('./tags.js');
+
+const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
+class Schema {
+ constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
+ this.compat = Array.isArray(compat)
+ ? tags.getTags(compat, 'compat')
+ : compat
+ ? tags.getTags(null, compat)
+ : null;
+ this.merge = !!merge;
+ this.name = (typeof schema === 'string' && schema) || 'core';
+ this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};
+ this.tags = tags.getTags(customTags, this.name);
+ this.toStringOptions = toStringDefaults !== null && toStringDefaults !== void 0 ? toStringDefaults : null;
+ Object.defineProperty(this, Node.MAP, { value: map.map });
+ Object.defineProperty(this, Node.SCALAR, { value: string.string });
+ Object.defineProperty(this, Node.SEQ, { value: seq.seq });
+ // Used by createMap()
+ this.sortMapEntries =
+ typeof sortMapEntries === 'function'
+ ? sortMapEntries
+ : sortMapEntries === true
+ ? sortMapEntriesByKey
+ : null;
+ }
+ clone() {
+ const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
+ copy.tags = this.tags.slice();
+ return copy;
+ }
+}
+
+exports.Schema = Schema;
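
The `schema` option ends up as `this.name` above and selects the tag set via `tags.getTags()`; a quick illustration of the effect, assuming the top-level `parse` export:

```js
const { parse } = require('yaml');

parse('2020-05-05');                          // '2020-05-05' (plain string, core schema)
parse('2020-05-05', { schema: 'yaml-1.1' });  // Date instance (yaml-1.1 includes !!timestamp)
parse('no', { schema: 'failsafe' });          // 'no' (failsafe has only map, seq and str)
```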
diff --git a/node_modules/yaml/dist/schema/common/map.d.ts b/node_modules/yaml/dist/schema/common/map.d.ts
new file mode 100644
index 0000000..9b300f8
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/map.d.ts
@@ -0,0 +1,2 @@
+import type { CollectionTag } from '../types.js';
+export declare const map: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/common/map.js b/node_modules/yaml/dist/schema/common/map.js
new file mode 100644
index 0000000..46dab3c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/map.js
@@ -0,0 +1,44 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+
+function createMap(schema, obj, ctx) {
+ const { keepUndefined, replacer } = ctx;
+ const map = new YAMLMap.YAMLMap(schema);
+ const add = (key, value) => {
+ if (typeof replacer === 'function')
+ value = replacer.call(obj, key, value);
+ else if (Array.isArray(replacer) && !replacer.includes(key))
+ return;
+ if (value !== undefined || keepUndefined)
+ map.items.push(Pair.createPair(key, value, ctx));
+ };
+ if (obj instanceof Map) {
+ for (const [key, value] of obj)
+ add(key, value);
+ }
+ else if (obj && typeof obj === 'object') {
+ for (const key of Object.keys(obj))
+ add(key, obj[key]);
+ }
+ if (typeof schema.sortMapEntries === 'function') {
+ map.items.sort(schema.sortMapEntries);
+ }
+ return map;
+}
+const map = {
+ collection: 'map',
+ createNode: createMap,
+ default: true,
+ nodeClass: YAMLMap.YAMLMap,
+ tag: 'tag:yaml.org,2002:map',
+ resolve(map, onError) {
+ if (!Node.isMap(map))
+ onError('Expected a mapping for this tag');
+ return map;
+ }
+};
+
+exports.map = map;
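
`createMap()` above applies the replacer and the `keepUndefined` option when plain objects or Maps are turned into YAML mappings; a small sketch of the observable behaviour:

```js
const { stringify } = require('yaml');

stringify({ keep: 1, drop: 2 }, ['keep']);            // 'keep: 1\n'  (replacer array)
stringify({ a: undefined });                          // '{}\n'       (entry omitted)
stringify({ a: undefined }, { keepUndefined: true }); // 'a: null\n'
stringify(new Map([['a', 1]]));                       // 'a: 1\n'     (Map input)
```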
diff --git a/node_modules/yaml/dist/schema/common/null.d.ts b/node_modules/yaml/dist/schema/common/null.d.ts
new file mode 100644
index 0000000..66abea5
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/null.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const nullTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/common/null.js b/node_modules/yaml/dist/schema/common/null.js
new file mode 100644
index 0000000..cb353a7
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/null.js
@@ -0,0 +1,17 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+const nullTag = {
+ identify: value => value == null,
+ createNode: () => new Scalar.Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^(?:~|[Nn]ull|NULL)?$/,
+ resolve: () => new Scalar.Scalar(null),
+ stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
+ ? source
+ : ctx.options.nullStr
+};
+
+exports.nullTag = nullTag;
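
Because the `test` pattern above also matches the empty string, missing values resolve to null; the `nullStr` option controls the output form. For instance:

```js
const { parse, stringify } = require('yaml');

parse('key:');                      // { key: null }
parse('~');                         // null
stringify(null);                    // 'null\n'
stringify(null, { nullStr: '~' });  // '~\n'
```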
diff --git a/node_modules/yaml/dist/schema/common/seq.d.ts b/node_modules/yaml/dist/schema/common/seq.d.ts
new file mode 100644
index 0000000..c038d30
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/seq.d.ts
@@ -0,0 +1,2 @@
+import type { CollectionTag } from '../types.js';
+export declare const seq: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/common/seq.js b/node_modules/yaml/dist/schema/common/seq.js
new file mode 100644
index 0000000..0c48bde
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/seq.js
@@ -0,0 +1,35 @@
+'use strict';
+
+var createNode = require('../../doc/createNode.js');
+var Node = require('../../nodes/Node.js');
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+
+function createSeq(schema, obj, ctx) {
+ const { replacer } = ctx;
+ const seq = new YAMLSeq.YAMLSeq(schema);
+ if (obj && Symbol.iterator in Object(obj)) {
+ let i = 0;
+ for (let it of obj) {
+ if (typeof replacer === 'function') {
+ const key = obj instanceof Set ? it : String(i++);
+ it = replacer.call(obj, key, it);
+ }
+ seq.items.push(createNode.createNode(it, undefined, ctx));
+ }
+ }
+ return seq;
+}
+const seq = {
+ collection: 'seq',
+ createNode: createSeq,
+ default: true,
+ nodeClass: YAMLSeq.YAMLSeq,
+ tag: 'tag:yaml.org,2002:seq',
+ resolve(seq, onError) {
+ if (!Node.isSeq(seq))
+ onError('Expected a sequence for this tag');
+ return seq;
+ }
+};
+
+exports.seq = seq;
diff --git a/node_modules/yaml/dist/schema/common/string.d.ts b/node_modules/yaml/dist/schema/common/string.d.ts
new file mode 100644
index 0000000..539c9b1
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/string.d.ts
@@ -0,0 +1,2 @@
+import type { ScalarTag } from '../types.js';
+export declare const string: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/common/string.js b/node_modules/yaml/dist/schema/common/string.js
new file mode 100644
index 0000000..7601420
--- /dev/null
+++ b/node_modules/yaml/dist/schema/common/string.js
@@ -0,0 +1,16 @@
+'use strict';
+
+var stringifyString = require('../../stringify/stringifyString.js');
+
+const string = {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify(item, ctx, onComment, onChompKeep) {
+ ctx = Object.assign({ actualString: true }, ctx);
+ return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);
+ }
+};
+
+exports.string = string;
diff --git a/node_modules/yaml/dist/schema/core/bool.d.ts b/node_modules/yaml/dist/schema/core/bool.d.ts
new file mode 100644
index 0000000..e4bdc4c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/bool.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const boolTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/core/bool.js b/node_modules/yaml/dist/schema/core/bool.js
new file mode 100644
index 0000000..4def73c
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/bool.js
@@ -0,0 +1,21 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+const boolTag = {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
+ resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),
+ stringify({ source, value }, ctx) {
+ if (source && boolTag.test.test(source)) {
+ const sv = source[0] === 't' || source[0] === 'T';
+ if (value === sv)
+ return source;
+ }
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+ }
+};
+
+exports.boolTag = boolTag;
diff --git a/node_modules/yaml/dist/schema/core/float.d.ts b/node_modules/yaml/dist/schema/core/float.d.ts
new file mode 100644
index 0000000..22f0249
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/float.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const floatNaN: ScalarTag;
+export declare const floatExp: ScalarTag;
+export declare const float: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/core/float.js b/node_modules/yaml/dist/schema/core/float.js
new file mode 100644
index 0000000..a1c96dd
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/float.js
@@ -0,0 +1,47 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
+ resolve: str => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber.stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
+ resolve: str => parseFloat(str),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
+ resolve(str) {
+ const node = new Scalar.Scalar(parseFloat(str));
+ const dot = str.indexOf('.');
+ if (dot !== -1 && str[str.length - 1] === '0')
+ node.minFractionDigits = str.length - dot - 1;
+ return node;
+ },
+ stringify: stringifyNumber.stringifyNumber
+};
+
+exports.float = float;
+exports.floatExp = floatExp;
+exports.floatNaN = floatNaN;
diff --git a/node_modules/yaml/dist/schema/core/int.d.ts b/node_modules/yaml/dist/schema/core/int.d.ts
new file mode 100644
index 0000000..35e2d4b
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/int.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const intOct: ScalarTag;
+export declare const int: ScalarTag;
+export declare const intHex: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/core/int.js b/node_modules/yaml/dist/schema/core/int.js
new file mode 100644
index 0000000..fe4c9ca
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/int.js
@@ -0,0 +1,42 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value) && value >= 0)
+ return prefix + value.toString(radix);
+ return stringifyNumber.stringifyNumber(node);
+}
+const intOct = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^0o[0-7]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),
+ stringify: node => intStringify(node, 8, '0o')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber.stringifyNumber
+};
+const intHex = {
+ identify: value => intIdentify(value) && value >= 0,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^0x[0-9a-fA-F]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+exports.int = int;
+exports.intHex = intHex;
+exports.intOct = intOct;
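
`intResolve()` strips the `0o`/`0x` prefix via its `offset` argument and applies the radix, while `intAsBigInt` switches the result type. A quick check with the default core schema:

```js
const { parse } = require('yaml');

parse('0x1F');                       // 31
parse('0o17');                       // 15
parse('42');                         // 42
parse('42', { intAsBigInt: true });  // 42n
```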
diff --git a/node_modules/yaml/dist/schema/core/schema.d.ts b/node_modules/yaml/dist/schema/core/schema.d.ts
new file mode 100644
index 0000000..7663949
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/schema.d.ts
@@ -0,0 +1 @@
+export declare const schema: (import("../types.js").ScalarTag | import("../types.js").CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/core/schema.js b/node_modules/yaml/dist/schema/core/schema.js
new file mode 100644
index 0000000..6ab87f2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/core/schema.js
@@ -0,0 +1,25 @@
+'use strict';
+
+var map = require('../common/map.js');
+var _null = require('../common/null.js');
+var seq = require('../common/seq.js');
+var string = require('../common/string.js');
+var bool = require('./bool.js');
+var float = require('./float.js');
+var int = require('./int.js');
+
+const schema = [
+ map.map,
+ seq.seq,
+ string.string,
+ _null.nullTag,
+ bool.boolTag,
+ int.intOct,
+ int.int,
+ int.intHex,
+ float.floatNaN,
+ float.floatExp,
+ float.float
+];
+
+exports.schema = schema;
diff --git a/node_modules/yaml/dist/schema/json/schema.d.ts b/node_modules/yaml/dist/schema/json/schema.d.ts
new file mode 100644
index 0000000..a166ca2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/json/schema.d.ts
@@ -0,0 +1,2 @@
+import { CollectionTag, ScalarTag } from '../types.js';
+export declare const schema: (ScalarTag | CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/json/schema.js b/node_modules/yaml/dist/schema/json/schema.js
new file mode 100644
index 0000000..31d0b4d
--- /dev/null
+++ b/node_modules/yaml/dist/schema/json/schema.js
@@ -0,0 +1,64 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var map = require('../common/map.js');
+var seq = require('../common/seq.js');
+
+function intIdentify(value) {
+ return typeof value === 'bigint' || Number.isInteger(value);
+}
+const stringifyJSON = ({ value }) => JSON.stringify(value);
+const jsonScalars = [
+ {
+ identify: value => typeof value === 'string',
+ default: true,
+ tag: 'tag:yaml.org,2002:str',
+ resolve: str => str,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => value == null,
+ createNode: () => new Scalar.Scalar(null),
+ default: true,
+ tag: 'tag:yaml.org,2002:null',
+ test: /^null$/,
+ resolve: () => null,
+ stringify: stringifyJSON
+ },
+ {
+ identify: value => typeof value === 'boolean',
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^true|false$/,
+ resolve: str => str === 'true',
+ stringify: stringifyJSON
+ },
+ {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^-?(?:0|[1-9][0-9]*)$/,
+ resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
+ stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value)
+ },
+ {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
+ resolve: str => parseFloat(str),
+ stringify: stringifyJSON
+ }
+];
+const jsonError = {
+ default: true,
+ tag: '',
+ test: /^/,
+ resolve(str, onError) {
+ onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
+ return str;
+ }
+};
+const schema = [map.map, seq.seq].concat(jsonScalars, jsonError);
+
+exports.schema = schema;
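
With the JSON schema, the catch-all `jsonError` tag above reports any plain scalar that is not valid JSON, while quoted strings and JSON literals still resolve normally. Roughly:

```js
const { parseDocument } = require('yaml');

const strict = parseDocument('a: yes', { schema: 'json' });
console.log(strict.errors.length > 0);  // true: 'a' and 'yes' are unresolved plain scalars

const ok = parseDocument('"a": true', { schema: 'json' });
console.log(ok.toJS());                 // { a: true }
```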
diff --git a/node_modules/yaml/dist/schema/tags.d.ts b/node_modules/yaml/dist/schema/tags.d.ts
new file mode 100644
index 0000000..8b4acf2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/tags.d.ts
@@ -0,0 +1,40 @@
+import { SchemaOptions } from '../options.js';
+import type { CollectionTag, ScalarTag } from './types.js';
+declare const tagsByName: {
+ binary: ScalarTag;
+ bool: ScalarTag & {
+ test: RegExp;
+ };
+ float: ScalarTag;
+ floatExp: ScalarTag;
+ floatNaN: ScalarTag;
+ floatTime: ScalarTag;
+ int: ScalarTag;
+ intHex: ScalarTag;
+ intOct: ScalarTag;
+ intTime: ScalarTag;
+ map: CollectionTag;
+ null: ScalarTag & {
+ test: RegExp;
+ };
+ omap: CollectionTag;
+ pairs: CollectionTag;
+ seq: CollectionTag;
+ set: CollectionTag;
+ timestamp: ScalarTag & {
+ test: RegExp;
+ };
+};
+export declare type TagId = keyof typeof tagsByName;
+export declare type Tags = Array<ScalarTag | CollectionTag | TagId>;
+export declare const coreKnownTags: {
+ 'tag:yaml.org,2002:binary': ScalarTag;
+ 'tag:yaml.org,2002:omap': CollectionTag;
+ 'tag:yaml.org,2002:pairs': CollectionTag;
+ 'tag:yaml.org,2002:set': CollectionTag;
+ 'tag:yaml.org,2002:timestamp': ScalarTag & {
+ test: RegExp;
+ };
+};
+export declare function getTags(customTags: SchemaOptions['customTags'] | undefined, schemaName: string): (ScalarTag | CollectionTag)[];
+export {};
diff --git a/node_modules/yaml/dist/schema/tags.js b/node_modules/yaml/dist/schema/tags.js
new file mode 100644
index 0000000..e49d2b0
--- /dev/null
+++ b/node_modules/yaml/dist/schema/tags.js
@@ -0,0 +1,86 @@
+'use strict';
+
+var map = require('./common/map.js');
+var _null = require('./common/null.js');
+var seq = require('./common/seq.js');
+var string = require('./common/string.js');
+var bool = require('./core/bool.js');
+var float = require('./core/float.js');
+var int = require('./core/int.js');
+var schema = require('./core/schema.js');
+var schema$1 = require('./json/schema.js');
+var binary = require('./yaml-1.1/binary.js');
+var omap = require('./yaml-1.1/omap.js');
+var pairs = require('./yaml-1.1/pairs.js');
+var schema$2 = require('./yaml-1.1/schema.js');
+var set = require('./yaml-1.1/set.js');
+var timestamp = require('./yaml-1.1/timestamp.js');
+
+const schemas = new Map([
+ ['core', schema.schema],
+ ['failsafe', [map.map, seq.seq, string.string]],
+ ['json', schema$1.schema],
+ ['yaml11', schema$2.schema],
+ ['yaml-1.1', schema$2.schema]
+]);
+const tagsByName = {
+ binary: binary.binary,
+ bool: bool.boolTag,
+ float: float.float,
+ floatExp: float.floatExp,
+ floatNaN: float.floatNaN,
+ floatTime: timestamp.floatTime,
+ int: int.int,
+ intHex: int.intHex,
+ intOct: int.intOct,
+ intTime: timestamp.intTime,
+ map: map.map,
+ null: _null.nullTag,
+ omap: omap.omap,
+ pairs: pairs.pairs,
+ seq: seq.seq,
+ set: set.set,
+ timestamp: timestamp.timestamp
+};
+const coreKnownTags = {
+ 'tag:yaml.org,2002:binary': binary.binary,
+ 'tag:yaml.org,2002:omap': omap.omap,
+ 'tag:yaml.org,2002:pairs': pairs.pairs,
+ 'tag:yaml.org,2002:set': set.set,
+ 'tag:yaml.org,2002:timestamp': timestamp.timestamp
+};
+function getTags(customTags, schemaName) {
+ let tags = schemas.get(schemaName);
+ if (!tags) {
+ if (Array.isArray(customTags))
+ tags = [];
+ else {
+ const keys = Array.from(schemas.keys())
+ .filter(key => key !== 'yaml11')
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
+ }
+ }
+ if (Array.isArray(customTags)) {
+ for (const tag of customTags)
+ tags = tags.concat(tag);
+ }
+ else if (typeof customTags === 'function') {
+ tags = customTags(tags.slice());
+ }
+ return tags.map(tag => {
+ if (typeof tag !== 'string')
+ return tag;
+ const tagObj = tagsByName[tag];
+ if (tagObj)
+ return tagObj;
+ const keys = Object.keys(tagsByName)
+ .map(key => JSON.stringify(key))
+ .join(', ');
+ throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
+ });
+}
+
+exports.coreKnownTags = coreKnownTags;
+exports.getTags = getTags;
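
String entries in `customTags` are looked up in the `tagsByName` table above, so the yaml-1.1 tags can be added to the core schema by name; unknown names throw with the list of valid keys:

```js
const { parse } = require('yaml');

parse('2001-12-14');                                // '2001-12-14' (core schema: a string)
parse('2001-12-14', { customTags: ['timestamp'] }); // Date instance

// parse('x', { customTags: ['nope'] })
// -> Error: Unknown custom tag "nope"; use one of "binary", "bool", ...
```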
diff --git a/node_modules/yaml/dist/schema/types.d.ts b/node_modules/yaml/dist/schema/types.d.ts
new file mode 100644
index 0000000..13e8354
--- /dev/null
+++ b/node_modules/yaml/dist/schema/types.d.ts
@@ -0,0 +1,82 @@
+import type { CreateNodeContext } from '../doc/createNode.js';
+import type { Schema } from './Schema.js';
+import type { Node } from '../nodes/Node.js';
+import type { Scalar } from '../nodes/Scalar.js';
+import type { YAMLMap } from '../nodes/YAMLMap.js';
+import type { YAMLSeq } from '../nodes/YAMLSeq.js';
+import type { ParseOptions } from '../options.js';
+import type { StringifyContext } from '../stringify/stringify.js';
+interface TagBase {
+ /**
+ * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes.
+ */
+ createNode?: (schema: Schema, value: unknown, ctx: CreateNodeContext) => Node;
+ /**
+ * If `true`, together with `test` allows for values to be stringified without
+ * an explicit tag. For most cases, it's unlikely that you'll actually want to
+ * use this, even if you first think you do.
+ */
+ default: boolean;
+ /**
+ * If a tag has multiple forms that should be parsed and/or stringified
+ * differently, use `format` to identify them.
+ */
+ format?: string;
+ /**
+ * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or
+ * `instanceof`.
+ */
+ identify?: (value: unknown) => boolean;
+ /**
+ * The identifier for your data type, with which its stringified form will be
+ * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified
+ * `tag:domain,date:foo`.
+ */
+ tag: string;
+}
+export interface ScalarTag extends TagBase {
+ collection?: never;
+ nodeClass?: never;
+ /**
+ * Turns a value into an AST node.
+ * If returning a non-`Node` value, the output will be wrapped as a `Scalar`.
+ */
+ resolve(value: string, onError: (message: string) => void, options: ParseOptions): unknown;
+ /**
+ * Optional function stringifying a Scalar node. If your data includes a
+ * suitable `.toString()` method, you can probably leave this undefined and
+ * use the default stringifier.
+ *
+ * @param item The node being stringified.
+ * @param ctx Contains the stringifying context variables.
+ * @param onComment Callback to signal that the stringifier includes the
+ * item's comment in its output.
+ * @param onChompKeep Callback to signal that the output uses a block scalar
+ * type with the `+` chomping indicator.
+ */
+ stringify?: (item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void) => string;
+ /**
+ * Together with `default` allows for values to be stringified without an
+ * explicit tag and detected using a regular expression. For most cases, it's
+ * unlikely that you'll actually want to use these, even if you first think
+ * you do.
+ */
+ test?: RegExp;
+}
+export interface CollectionTag extends TagBase {
+ stringify?: never;
+ test?: never;
+ /** The source collection type supported by this tag. */
+ collection: 'map' | 'seq';
+ /**
+ * The `Node` child class that implements this tag.
+ * If set, used to select this tag when stringifying.
+ */
+ nodeClass?: new () => Node;
+ /**
+ * Turns a value into an AST node.
+ * If returning a non-`Node` value, the output will be wrapped as a `Scalar`.
+ */
+ resolve(value: YAMLMap.Parsed | YAMLSeq.Parsed, onError: (message: string) => void, options: ParseOptions): unknown;
+}
+export {};
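
As an illustration of the `ScalarTag` shape documented above, here is a hypothetical local `!re` tag for regular expressions; the tag name, the `/.../flags` format and the `regexpTag` object are inventions for the example, not part of the library:

```js
const { parse, stringify } = require('yaml');

// Hypothetical custom tag following the ScalarTag interface above.
const regexpTag = {
  identify: value => value instanceof RegExp,
  tag: '!re',
  default: false, // never applied to untagged plain scalars
  resolve: str => {
    const match = /^\/(.*)\/([dgimsuy]*)$/.exec(str);
    return match ? new RegExp(match[1], match[2]) : str;
  },
  stringify: item => String(item.value)
};

stringify({ pattern: /ab+c/i }, { customTags: [regexpTag] });
// roughly: 'pattern: !re /ab+c/i\n'
parse('pattern: !re /ab+c/i', { customTags: [regexpTag] }).pattern instanceof RegExp;
// true
```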
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts
new file mode 100644
index 0000000..2054970
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts
@@ -0,0 +1,2 @@
+import type { ScalarTag } from '../types.js';
+export declare const binary: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/binary.js b/node_modules/yaml/dist/schema/yaml-1.1/binary.js
new file mode 100644
index 0000000..df93e05
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/binary.js
@@ -0,0 +1,68 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyString = require('../../stringify/stringifyString.js');
+
+const binary = {
+ identify: value => value instanceof Uint8Array,
+ default: false,
+ tag: 'tag:yaml.org,2002:binary',
+ /**
+     * Returns a Buffer in Node.js and a Uint8Array in browsers
+ *
+ * To use the resulting buffer as an image, you'll want to do something like:
+ *
+ * const blob = new Blob([buffer], { type: 'image/jpeg' })
+ * document.querySelector('#photo').src = URL.createObjectURL(blob)
+ */
+ resolve(src, onError) {
+ if (typeof Buffer === 'function') {
+ return Buffer.from(src, 'base64');
+ }
+ else if (typeof atob === 'function') {
+ // On IE 11, atob() can't handle newlines
+ const str = atob(src.replace(/[\n\r]/g, ''));
+ const buffer = new Uint8Array(str.length);
+ for (let i = 0; i < str.length; ++i)
+ buffer[i] = str.charCodeAt(i);
+ return buffer;
+ }
+ else {
+ onError('This environment does not support reading binary tags; either Buffer or atob is required');
+ return src;
+ }
+ },
+ stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const buf = value; // checked earlier by binary.identify()
+ let str;
+ if (typeof Buffer === 'function') {
+ str =
+ buf instanceof Buffer
+ ? buf.toString('base64')
+ : Buffer.from(buf.buffer).toString('base64');
+ }
+ else if (typeof btoa === 'function') {
+ let s = '';
+ for (let i = 0; i < buf.length; ++i)
+ s += String.fromCharCode(buf[i]);
+ str = btoa(s);
+ }
+ else {
+ throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
+ }
+ if (!type)
+ type = Scalar.Scalar.BLOCK_LITERAL;
+ if (type !== Scalar.Scalar.QUOTE_DOUBLE) {
+ const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
+ const n = Math.ceil(str.length / lineWidth);
+ const lines = new Array(n);
+ for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
+ lines[i] = str.substr(o, lineWidth);
+ }
+ str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' ');
+ }
+ return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
+ }
+};
+
+exports.binary = binary;
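
Since `!!binary` is part of `coreKnownTags` (see schema/tags.js above), explicitly tagged values resolve on parse even with the default schema, while stringifying a buffer as `!!binary` still needs `customTags: ['binary']`. A Node-oriented sketch:

```js
const { parse, stringify } = require('yaml');

const buf = parse('!!binary "aGVsbG8="'); // Buffer in Node, Uint8Array in browsers
console.log(buf.toString('utf8'));        // 'hello'

console.log(stringify(buf, { customTags: ['binary'] }));
// a !!binary block scalar carrying the base64 data
```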
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts
new file mode 100644
index 0000000..587b55b
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts
@@ -0,0 +1,7 @@
+import type { ScalarTag } from '../types.js';
+export declare const trueTag: ScalarTag & {
+ test: RegExp;
+};
+export declare const falseTag: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/bool.js b/node_modules/yaml/dist/schema/yaml-1.1/bool.js
new file mode 100644
index 0000000..fdb3b46
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/bool.js
@@ -0,0 +1,29 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+
+function boolStringify({ value, source }, ctx) {
+ const boolObj = value ? trueTag : falseTag;
+ if (source && boolObj.test.test(source))
+ return source;
+ return value ? ctx.options.trueStr : ctx.options.falseStr;
+}
+const trueTag = {
+ identify: value => value === true,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
+ resolve: () => new Scalar.Scalar(true),
+ stringify: boolStringify
+};
+const falseTag = {
+ identify: value => value === false,
+ default: true,
+ tag: 'tag:yaml.org,2002:bool',
+ test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
+ resolve: () => new Scalar.Scalar(false),
+ stringify: boolStringify
+};
+
+exports.falseTag = falseTag;
+exports.trueTag = trueTag;
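
These tags are only active in the yaml-1.1 schema, which is why `yes`/`no`/`on`/`off` stay plain strings under the default core schema:

```js
const { parse } = require('yaml');

parse('on', { schema: 'yaml-1.1' });  // true
parse('no', { schema: 'yaml-1.1' });  // false
parse('on');                          // 'on' (core schema: not a boolean)
```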
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts
new file mode 100644
index 0000000..22f0249
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts
@@ -0,0 +1,4 @@
+import type { ScalarTag } from '../types.js';
+export declare const floatNaN: ScalarTag;
+export declare const floatExp: ScalarTag;
+export declare const float: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/float.js b/node_modules/yaml/dist/schema/yaml-1.1/float.js
new file mode 100644
index 0000000..4aea19a
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/float.js
@@ -0,0 +1,50 @@
+'use strict';
+
+var Scalar = require('../../nodes/Scalar.js');
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const floatNaN = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
+ resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
+ ? NaN
+ : str[0] === '-'
+ ? Number.NEGATIVE_INFINITY
+ : Number.POSITIVE_INFINITY,
+ stringify: stringifyNumber.stringifyNumber
+};
+const floatExp = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'EXP',
+ test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
+ resolve: (str) => parseFloat(str.replace(/_/g, '')),
+ stringify(node) {
+ const num = Number(node.value);
+ return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);
+ }
+};
+const float = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
+ resolve(str) {
+ const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));
+ const dot = str.indexOf('.');
+ if (dot !== -1) {
+ const f = str.substring(dot + 1).replace(/_/g, '');
+ if (f[f.length - 1] === '0')
+ node.minFractionDigits = f.length;
+ }
+ return node;
+ },
+ stringify: stringifyNumber.stringifyNumber
+};
+
+exports.float = float;
+exports.floatExp = floatExp;
+exports.floatNaN = floatNaN;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts
new file mode 100644
index 0000000..3d92f37
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts
@@ -0,0 +1,5 @@
+import type { ScalarTag } from '../types.js';
+export declare const intBin: ScalarTag;
+export declare const intOct: ScalarTag;
+export declare const int: ScalarTag;
+export declare const intHex: ScalarTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/int.js b/node_modules/yaml/dist/schema/yaml-1.1/int.js
new file mode 100644
index 0000000..fdf47ca
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/int.js
@@ -0,0 +1,76 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
+function intResolve(str, offset, radix, { intAsBigInt }) {
+ const sign = str[0];
+ if (sign === '-' || sign === '+')
+ offset += 1;
+ str = str.substring(offset).replace(/_/g, '');
+ if (intAsBigInt) {
+ switch (radix) {
+ case 2:
+ str = `0b${str}`;
+ break;
+ case 8:
+ str = `0o${str}`;
+ break;
+ case 16:
+ str = `0x${str}`;
+ break;
+ }
+ const n = BigInt(str);
+ return sign === '-' ? BigInt(-1) * n : n;
+ }
+ const n = parseInt(str, radix);
+ return sign === '-' ? -1 * n : n;
+}
+function intStringify(node, radix, prefix) {
+ const { value } = node;
+ if (intIdentify(value)) {
+ const str = value.toString(radix);
+ return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
+ }
+ return stringifyNumber.stringifyNumber(node);
+}
+const intBin = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'BIN',
+ test: /^[-+]?0b[0-1_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
+ stringify: node => intStringify(node, 2, '0b')
+};
+const intOct = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'OCT',
+ test: /^[-+]?0[0-7_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
+ stringify: node => intStringify(node, 8, '0')
+};
+const int = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ test: /^[-+]?[0-9][0-9_]*$/,
+ resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
+ stringify: stringifyNumber.stringifyNumber
+};
+const intHex = {
+ identify: intIdentify,
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'HEX',
+ test: /^[-+]?0x[0-9a-fA-F_]+$/,
+ resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
+ stringify: node => intStringify(node, 16, '0x')
+};
+
+exports.int = int;
+exports.intBin = intBin;
+exports.intHex = intHex;
+exports.intOct = intOct;
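
The four integer tags differ only in the radix and prefix offset passed to intResolve, with intAsBigInt switching the result type from number to BigInt. A minimal sketch, assuming the public YAML.parse entry point and its version / intAsBigInt options (not part of this diff):

// Sketch only: assumes YAML.parse and the documented intAsBigInt parse option.
const YAML = require('yaml');

YAML.parse('0b1010', { version: '1.1' }); // 10   (intBin, radix 2)
YAML.parse('0644', { version: '1.1' });   // 420  (intOct, radix 8)
YAML.parse('0x2A', { version: '1.1' });   // 42   (intHex, radix 16)
YAML.parse('1_000', { version: '1.1' });  // 1000 (underscores stripped)

// With intAsBigInt, resolution goes through BigInt() instead of parseInt():
YAML.parse('0x2A', { version: '1.1', intAsBigInt: true }); // 42n
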
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts
new file mode 100644
index 0000000..4d71bc4
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts
@@ -0,0 +1,21 @@
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+import { ToJSContext } from '../../nodes/toJS.js';
+import { CollectionTag } from '../types.js';
+export declare class YAMLOMap extends YAMLSeq {
+ static tag: string;
+ constructor();
+ add: (pair: import("../../index.js").Pair<any, any> | {
+ key: any;
+ value: any;
+ }, overwrite?: boolean | undefined) => void;
+ delete: (key: any) => boolean;
+ get: (key: any, keepScalar?: boolean | undefined) => unknown;
+ has: (key: any) => boolean;
+ set: (key: any, value: any) => void;
+ /**
+ * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
+ * but TypeScript won't allow widening the signature of a child method.
+ */
+ toJSON(_?: unknown, ctx?: ToJSContext): unknown[];
+}
+export declare const omap: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/omap.js b/node_modules/yaml/dist/schema/yaml-1.1/omap.js
new file mode 100644
index 0000000..006a65d
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/omap.js
@@ -0,0 +1,76 @@
+'use strict';
+
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+var toJS = require('../../nodes/toJS.js');
+var Node = require('../../nodes/Node.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+var pairs = require('./pairs.js');
+
+class YAMLOMap extends YAMLSeq.YAMLSeq {
+ constructor() {
+ super();
+ this.add = YAMLMap.YAMLMap.prototype.add.bind(this);
+ this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);
+ this.get = YAMLMap.YAMLMap.prototype.get.bind(this);
+ this.has = YAMLMap.YAMLMap.prototype.has.bind(this);
+ this.set = YAMLMap.YAMLMap.prototype.set.bind(this);
+ this.tag = YAMLOMap.tag;
+ }
+ /**
+ * If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
+ * but TypeScript won't allow widening the signature of a child method.
+ */
+ toJSON(_, ctx) {
+ if (!ctx)
+ return super.toJSON(_);
+ const map = new Map();
+ if (ctx === null || ctx === void 0 ? void 0 : ctx.onCreate)
+ ctx.onCreate(map);
+ for (const pair of this.items) {
+ let key, value;
+ if (Node.isPair(pair)) {
+ key = toJS.toJS(pair.key, '', ctx);
+ value = toJS.toJS(pair.value, key, ctx);
+ }
+ else {
+ key = toJS.toJS(pair, '', ctx);
+ }
+ if (map.has(key))
+ throw new Error('Ordered maps must not include duplicate keys');
+ map.set(key, value);
+ }
+ return map;
+ }
+}
+YAMLOMap.tag = 'tag:yaml.org,2002:omap';
+const omap = {
+ collection: 'seq',
+ identify: value => value instanceof Map,
+ nodeClass: YAMLOMap,
+ default: false,
+ tag: 'tag:yaml.org,2002:omap',
+ resolve(seq, onError) {
+ const pairs$1 = pairs.resolvePairs(seq, onError);
+ const seenKeys = [];
+ for (const { key } of pairs$1.items) {
+ if (Node.isScalar(key)) {
+ if (seenKeys.includes(key.value)) {
+ onError(`Ordered maps must not include duplicate keys: ${key.value}`);
+ }
+ else {
+ seenKeys.push(key.value);
+ }
+ }
+ }
+ return Object.assign(new YAMLOMap(), pairs$1);
+ },
+ createNode(schema, iterable, ctx) {
+ const pairs$1 = pairs.createPairs(schema, iterable, ctx);
+ const omap = new YAMLOMap();
+ omap.items = pairs$1.items;
+ return omap;
+ }
+};
+
+exports.YAMLOMap = YAMLOMap;
+exports.omap = omap;
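
YAMLOMap keeps sequence order while borrowing YAMLMap's accessors, and its toJSON builds a Map while rejecting duplicate keys. A minimal usage sketch, assuming the public YAML.parse entry point with version: '1.1' selecting this schema (an assumption; only the tag implementation is shown here):

// Sketch only: assumes YAML.parse with the yaml-1.1 schema selected via `version`.
const YAML = require('yaml');

const m = YAML.parse('!!omap\n- a: 1\n- b: 2\n', { version: '1.1' });
// m is a Map preserving insertion order: Map { 'a' => 1, 'b' => 2 }
m.get('b'); // 2

// Duplicate scalar keys are rejected by omap.resolve:
// '!!omap\n- a: 1\n- a: 2\n' -> "Ordered maps must not include duplicate keys: a"
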
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts
new file mode 100644
index 0000000..8090ac2
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts
@@ -0,0 +1,10 @@
+import type { CreateNodeContext } from '../../doc/createNode.js';
+import { ParsedNode } from '../../nodes/Node.js';
+import { Pair } from '../../nodes/Pair.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+import { YAMLSeq } from '../../nodes/YAMLSeq.js';
+import type { Schema } from '../../schema/Schema.js';
+import type { CollectionTag } from '../types.js';
+export declare function resolvePairs(seq: YAMLSeq.Parsed<ParsedNode | Pair<ParsedNode, ParsedNode | null>> | YAMLMap.Parsed, onError: (message: string) => void): YAMLSeq.Parsed<Pair<ParsedNode, ParsedNode | null>>;
+export declare function createPairs(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSeq<unknown>;
+export declare const pairs: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/pairs.js b/node_modules/yaml/dist/schema/yaml-1.1/pairs.js
new file mode 100644
index 0000000..12858ab
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/pairs.js
@@ -0,0 +1,82 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var Scalar = require('../../nodes/Scalar.js');
+var YAMLSeq = require('../../nodes/YAMLSeq.js');
+
+function resolvePairs(seq, onError) {
+ var _a;
+ if (Node.isSeq(seq)) {
+ for (let i = 0; i < seq.items.length; ++i) {
+ let item = seq.items[i];
+ if (Node.isPair(item))
+ continue;
+ else if (Node.isMap(item)) {
+ if (item.items.length > 1)
+ onError('Each pair must have its own sequence indicator');
+ const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));
+ if (item.commentBefore)
+ pair.key.commentBefore = pair.key.commentBefore
+ ? `${item.commentBefore}\n${pair.key.commentBefore}`
+ : item.commentBefore;
+ if (item.comment) {
+ const cn = (_a = pair.value) !== null && _a !== void 0 ? _a : pair.key;
+ cn.comment = cn.comment
+ ? `${item.comment}\n${cn.comment}`
+ : item.comment;
+ }
+ item = pair;
+ }
+ seq.items[i] = Node.isPair(item) ? item : new Pair.Pair(item);
+ }
+ }
+ else
+ onError('Expected a sequence for this tag');
+ return seq;
+}
+function createPairs(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const pairs = new YAMLSeq.YAMLSeq(schema);
+ pairs.tag = 'tag:yaml.org,2002:pairs';
+ let i = 0;
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let it of iterable) {
+ if (typeof replacer === 'function')
+ it = replacer.call(iterable, String(i++), it);
+ let key, value;
+ if (Array.isArray(it)) {
+ if (it.length === 2) {
+ key = it[0];
+ value = it[1];
+ }
+ else
+ throw new TypeError(`Expected [key, value] tuple: ${it}`);
+ }
+ else if (it && it instanceof Object) {
+ const keys = Object.keys(it);
+ if (keys.length === 1) {
+ key = keys[0];
+ value = it[key];
+ }
+ else
+ throw new TypeError(`Expected { key: value } tuple: ${it}`);
+ }
+ else {
+ key = it;
+ }
+ pairs.items.push(Pair.createPair(key, value, ctx));
+ }
+ return pairs;
+}
+const pairs = {
+ collection: 'seq',
+ default: false,
+ tag: 'tag:yaml.org,2002:pairs',
+ resolve: resolvePairs,
+ createNode: createPairs
+};
+
+exports.createPairs = createPairs;
+exports.pairs = pairs;
+exports.resolvePairs = resolvePairs;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts
new file mode 100644
index 0000000..7663949
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts
@@ -0,0 +1 @@
+export declare const schema: (import("../types.js").ScalarTag | import("../types.js").CollectionTag)[];
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/schema.js b/node_modules/yaml/dist/schema/yaml-1.1/schema.js
new file mode 100644
index 0000000..c04270a
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/schema.js
@@ -0,0 +1,39 @@
+'use strict';
+
+var map = require('../common/map.js');
+var _null = require('../common/null.js');
+var seq = require('../common/seq.js');
+var string = require('../common/string.js');
+var binary = require('./binary.js');
+var bool = require('./bool.js');
+var float = require('./float.js');
+var int = require('./int.js');
+var omap = require('./omap.js');
+var pairs = require('./pairs.js');
+var set = require('./set.js');
+var timestamp = require('./timestamp.js');
+
+const schema = [
+ map.map,
+ seq.seq,
+ string.string,
+ _null.nullTag,
+ bool.trueTag,
+ bool.falseTag,
+ int.intBin,
+ int.intOct,
+ int.int,
+ int.intHex,
+ float.floatNaN,
+ float.floatExp,
+ float.float,
+ binary.binary,
+ omap.omap,
+ pairs.pairs,
+ set.set,
+ timestamp.intTime,
+ timestamp.floatTime,
+ timestamp.timestamp
+];
+
+exports.schema = schema;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts
new file mode 100644
index 0000000..fa7cdac
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts
@@ -0,0 +1,22 @@
+import type { Schema } from '../../schema/Schema.js';
+import { Pair } from '../../nodes/Pair.js';
+import { Scalar } from '../../nodes/Scalar.js';
+import { ToJSContext } from '../../nodes/toJS.js';
+import { YAMLMap } from '../../nodes/YAMLMap.js';
+import type { StringifyContext } from '../../stringify/stringify.js';
+import type { CollectionTag } from '../types.js';
+export declare class YAMLSet<T = unknown> extends YAMLMap<T, Scalar<null> | null> {
+ static tag: string;
+ constructor(schema?: Schema);
+ add(key: T | Pair<T, Scalar<null> | null> | {
+ key: T;
+ value: Scalar<null> | null;
+ }): void;
+ get(key?: T, keepPair?: boolean): unknown;
+ set(key: T, value: boolean): void;
+ /** Will throw; `value` must be boolean */
+ set(key: T, value: null): void;
+ toJSON(_?: unknown, ctx?: ToJSContext): any;
+ toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
+}
+export declare const set: CollectionTag;
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/set.js b/node_modules/yaml/dist/schema/yaml-1.1/set.js
new file mode 100644
index 0000000..2270695
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/set.js
@@ -0,0 +1,90 @@
+'use strict';
+
+var Node = require('../../nodes/Node.js');
+var Pair = require('../../nodes/Pair.js');
+var YAMLMap = require('../../nodes/YAMLMap.js');
+
+class YAMLSet extends YAMLMap.YAMLMap {
+ constructor(schema) {
+ super(schema);
+ this.tag = YAMLSet.tag;
+ }
+ add(key) {
+ let pair;
+ if (Node.isPair(key))
+ pair = key;
+ else if (typeof key === 'object' &&
+ 'key' in key &&
+ 'value' in key &&
+ key.value === null)
+ pair = new Pair.Pair(key.key, null);
+ else
+ pair = new Pair.Pair(key, null);
+ const prev = YAMLMap.findPair(this.items, pair.key);
+ if (!prev)
+ this.items.push(pair);
+ }
+ get(key, keepPair) {
+ const pair = YAMLMap.findPair(this.items, key);
+ return !keepPair && Node.isPair(pair)
+ ? Node.isScalar(pair.key)
+ ? pair.key.value
+ : pair.key
+ : pair;
+ }
+ set(key, value) {
+ if (typeof value !== 'boolean')
+ throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
+ const prev = YAMLMap.findPair(this.items, key);
+ if (prev && !value) {
+ this.items.splice(this.items.indexOf(prev), 1);
+ }
+ else if (!prev && value) {
+ this.items.push(new Pair.Pair(key));
+ }
+ }
+ toJSON(_, ctx) {
+ return super.toJSON(_, ctx, Set);
+ }
+ toString(ctx, onComment, onChompKeep) {
+ if (!ctx)
+ return JSON.stringify(this);
+ if (this.hasAllNullValues(true))
+ return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
+ else
+ throw new Error('Set items must all have null values');
+ }
+}
+YAMLSet.tag = 'tag:yaml.org,2002:set';
+const set = {
+ collection: 'map',
+ identify: value => value instanceof Set,
+ nodeClass: YAMLSet,
+ default: false,
+ tag: 'tag:yaml.org,2002:set',
+ resolve(map, onError) {
+ if (Node.isMap(map)) {
+ if (map.hasAllNullValues(true))
+ return Object.assign(new YAMLSet(), map);
+ else
+ onError('Set items must all have null values');
+ }
+ else
+ onError('Expected a mapping for this tag');
+ return map;
+ },
+ createNode(schema, iterable, ctx) {
+ const { replacer } = ctx;
+ const set = new YAMLSet(schema);
+ if (iterable && Symbol.iterator in Object(iterable))
+ for (let value of iterable) {
+ if (typeof replacer === 'function')
+ value = replacer.call(iterable, value, value);
+ set.items.push(Pair.createPair(value, null, ctx));
+ }
+ return set;
+ }
+};
+
+exports.YAMLSet = YAMLSet;
+exports.set = set;
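
The set tag maps a YAML mapping whose values are all null onto a JavaScript Set, and YAMLSet.set(key, true/false) adds or removes members. A minimal sketch, again assuming YAML.parse with version: '1.1':

// Sketch only: assumes YAML.parse with the yaml-1.1 schema.
const YAML = require('yaml');

const s = YAML.parse('!!set\n? apple\n? banana\n', { version: '1.1' });
// s is a Set: Set { 'apple', 'banana' }
s.has('banana'); // true

// A !!set node with non-null values fails set.resolve with
// 'Set items must all have null values'.
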
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts
new file mode 100644
index 0000000..0c1d2d4
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts
@@ -0,0 +1,6 @@
+import type { ScalarTag } from '../types.js';
+export declare const intTime: ScalarTag;
+export declare const floatTime: ScalarTag;
+export declare const timestamp: ScalarTag & {
+ test: RegExp;
+};
diff --git a/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js
new file mode 100644
index 0000000..2d78ae8
--- /dev/null
+++ b/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js
@@ -0,0 +1,105 @@
+'use strict';
+
+var stringifyNumber = require('../../stringify/stringifyNumber.js');
+
+/** Internal types handle bigint as number, because TS can't figure it out. */
+function parseSexagesimal(str, asBigInt) {
+ const sign = str[0];
+ const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
+ const num = (n) => asBigInt ? BigInt(n) : Number(n);
+ const res = parts
+ .replace(/_/g, '')
+ .split(':')
+ .reduce((res, p) => res * num(60) + num(p), num(0));
+ return (sign === '-' ? num(-1) * res : res);
+}
+/**
+ * hhhh:mm:ss.sss
+ *
+ * Internal types handle bigint as number, because TS can't figure it out.
+ */
+function stringifySexagesimal(node) {
+ let { value } = node;
+ let num = (n) => n;
+ if (typeof value === 'bigint')
+ num = n => BigInt(n);
+ else if (isNaN(value) || !isFinite(value))
+ return stringifyNumber.stringifyNumber(node);
+ let sign = '';
+ if (value < 0) {
+ sign = '-';
+ value *= num(-1);
+ }
+ const _60 = num(60);
+ const parts = [value % _60]; // seconds, including ms
+ if (value < 60) {
+ parts.unshift(0); // at least one : is required
+ }
+ else {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value % _60); // minutes
+ if (value >= 60) {
+ value = (value - parts[0]) / _60;
+ parts.unshift(value); // hours
+ }
+ }
+ return (sign +
+ parts
+ .map(n => (n < 10 ? '0' + String(n) : String(n)))
+ .join(':')
+ .replace(/000000\d*$/, '') // % 60 may introduce error
+ );
+}
+const intTime = {
+ identify: value => typeof value === 'bigint' || Number.isInteger(value),
+ default: true,
+ tag: 'tag:yaml.org,2002:int',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
+ resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
+ stringify: stringifySexagesimal
+};
+const floatTime = {
+ identify: value => typeof value === 'number',
+ default: true,
+ tag: 'tag:yaml.org,2002:float',
+ format: 'TIME',
+ test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
+ resolve: str => parseSexagesimal(str, false),
+ stringify: stringifySexagesimal
+};
+const timestamp = {
+ identify: value => value instanceof Date,
+ default: true,
+ tag: 'tag:yaml.org,2002:timestamp',
+ // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
+ // may be omitted altogether, resulting in a date format. In such a case, the time part is
+ // assumed to be 00:00:00Z (start of day, UTC).
+ test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
+ '(?:' + // time is optional
+ '(?:t|T|[ \\t]+)' + // t | T | whitespace
+ '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
+ '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
+ ')?$'),
+ resolve(str) {
+ const match = str.match(timestamp.test);
+ if (!match)
+ throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
+ const [, year, month, day, hour, minute, second] = match.map(Number);
+ const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
+ let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
+ const tz = match[8];
+ if (tz && tz !== 'Z') {
+ let d = parseSexagesimal(tz, false);
+ if (Math.abs(d) < 30)
+ d *= 60;
+ date -= 60000 * d;
+ }
+ return new Date(date);
+ },
+ stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
+};
+
+exports.floatTime = floatTime;
+exports.intTime = intTime;
+exports.timestamp = timestamp;
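
With this schema, a plain scalar matching the timestamp regexp resolves straight to a Date: a missing time part means 00:00:00Z, and a trailing numeric zone is converted to minutes by parseSexagesimal before being applied. A minimal sketch, assuming YAML.parse with version: '1.1':

// Sketch only: assumes YAML.parse with the yaml-1.1 schema.
const YAML = require('yaml');

YAML.parse('2001-12-14', { version: '1.1' });
// Date for 2001-12-14T00:00:00.000Z (time defaults to start of day, UTC)

YAML.parse('2001-12-15 02:59:43.1Z', { version: '1.1' });
// Date for 2001-12-15T02:59:43.100Z

// A bare offset such as '-5' is scaled to minutes before being applied:
YAML.parse('2001-12-14 21:59:43.10 -5', { version: '1.1' });
// Date for 2001-12-15T02:59:43.100Z
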
diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.d.ts b/node_modules/yaml/dist/stringify/foldFlowLines.d.ts
new file mode 100644
index 0000000..58f8c7b
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/foldFlowLines.d.ts
@@ -0,0 +1,34 @@
+export declare const FOLD_FLOW = "flow";
+export declare const FOLD_BLOCK = "block";
+export declare const FOLD_QUOTED = "quoted";
+/**
+ * `'block'` prevents more-indented lines from being folded;
+ * `'quoted'` allows for `\` escapes, including escaped newlines
+ */
+export declare type FoldMode = 'flow' | 'block' | 'quoted';
+export interface FoldOptions {
+ /**
+ * Accounts for leading contents on the first line, defaulting to
+ * `indent.length`
+ */
+ indentAtStart?: number;
+ /** Default: `80` */
+ lineWidth?: number;
+ /**
+ * Allow highly indented lines to stretch the line width or indent content
+ * from the start.
+ *
+ * Default: `20`
+ */
+ minContentWidth?: number;
+ /** Called once if the text is folded */
+ onFold?: () => void;
+ /** Called once if any line of text exceeds lineWidth characters */
+ onOverflow?: () => void;
+}
+/**
+ * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
+ * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
+ * terminated with `\n` and started with `indent`.
+ */
+export declare function foldFlowLines(text: string, indent: string, mode?: FoldMode, { indentAtStart, lineWidth, minContentWidth, onFold, onOverflow }?: FoldOptions): string;
diff --git a/node_modules/yaml/dist/stringify/foldFlowLines.js b/node_modules/yaml/dist/stringify/foldFlowLines.js
new file mode 100644
index 0000000..efe7a25
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/foldFlowLines.js
@@ -0,0 +1,140 @@
+'use strict';
+
+const FOLD_FLOW = 'flow';
+const FOLD_BLOCK = 'block';
+const FOLD_QUOTED = 'quoted';
+/**
+ * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
+ * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
+ * terminated with `\n` and started with `indent`.
+ */
+function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
+ if (!lineWidth || lineWidth < 0)
+ return text;
+ const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
+ if (text.length <= endStep)
+ return text;
+ const folds = [];
+ const escapedFolds = {};
+ let end = lineWidth - indent.length;
+ if (typeof indentAtStart === 'number') {
+ if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
+ folds.push(0);
+ else
+ end = lineWidth - indentAtStart;
+ }
+ let split = undefined;
+ let prev = undefined;
+ let overflow = false;
+ let i = -1;
+ let escStart = -1;
+ let escEnd = -1;
+ if (mode === FOLD_BLOCK) {
+ i = consumeMoreIndentedLines(text, i);
+ if (i !== -1)
+ end = i + endStep;
+ }
+ for (let ch; (ch = text[(i += 1)]);) {
+ if (mode === FOLD_QUOTED && ch === '\\') {
+ escStart = i;
+ switch (text[i + 1]) {
+ case 'x':
+ i += 3;
+ break;
+ case 'u':
+ i += 5;
+ break;
+ case 'U':
+ i += 9;
+ break;
+ default:
+ i += 1;
+ }
+ escEnd = i;
+ }
+ if (ch === '\n') {
+ if (mode === FOLD_BLOCK)
+ i = consumeMoreIndentedLines(text, i);
+ end = i + endStep;
+ split = undefined;
+ }
+ else {
+ if (ch === ' ' &&
+ prev &&
+ prev !== ' ' &&
+ prev !== '\n' &&
+ prev !== '\t') {
+ // space surrounded by non-space can be replaced with newline + indent
+ const next = text[i + 1];
+ if (next && next !== ' ' && next !== '\n' && next !== '\t')
+ split = i;
+ }
+ if (i >= end) {
+ if (split) {
+ folds.push(split);
+ end = split + endStep;
+ split = undefined;
+ }
+ else if (mode === FOLD_QUOTED) {
+ // white-space collected at end may stretch past lineWidth
+ while (prev === ' ' || prev === '\t') {
+ prev = ch;
+ ch = text[(i += 1)];
+ overflow = true;
+ }
+ // Account for newline escape, but don't break preceding escape
+ const j = i > escEnd + 1 ? i - 2 : escStart - 1;
+ // Bail out if lineWidth & minContentWidth are shorter than an escape string
+ if (escapedFolds[j])
+ return text;
+ folds.push(j);
+ escapedFolds[j] = true;
+ end = j + endStep;
+ split = undefined;
+ }
+ else {
+ overflow = true;
+ }
+ }
+ }
+ prev = ch;
+ }
+ if (overflow && onOverflow)
+ onOverflow();
+ if (folds.length === 0)
+ return text;
+ if (onFold)
+ onFold();
+ let res = text.slice(0, folds[0]);
+ for (let i = 0; i < folds.length; ++i) {
+ const fold = folds[i];
+ const end = folds[i + 1] || text.length;
+ if (fold === 0)
+ res = `\n${indent}${text.slice(0, end)}`;
+ else {
+ if (mode === FOLD_QUOTED && escapedFolds[fold])
+ res += `${text[fold]}\\`;
+ res += `\n${indent}${text.slice(fold + 1, end)}`;
+ }
+ }
+ return res;
+}
+/**
+ * Presumes `i + 1` is at the start of a line
+ * @returns index of last newline in more-indented block
+ */
+function consumeMoreIndentedLines(text, i) {
+ let ch = text[i + 1];
+ while (ch === ' ' || ch === '\t') {
+ do {
+ ch = text[(i += 1)];
+ } while (ch && ch !== '\n');
+ ch = text[i + 1];
+ }
+ return i;
+}
+
+exports.FOLD_BLOCK = FOLD_BLOCK;
+exports.FOLD_FLOW = FOLD_FLOW;
+exports.FOLD_QUOTED = FOLD_QUOTED;
+exports.foldFlowLines = foldFlowLines;
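
foldFlowLines is re-exported from the package's util entry point (see util.d.ts later in this diff), so the folding rules can be exercised directly. A minimal sketch, assuming the 'yaml/util' subpath export:

// Sketch only: assumes the 'yaml/util' re-export listed in util.d.ts.
const { foldFlowLines } = require('yaml/util');

const text = 'plain scalar text that is clearly longer than the target line width';
const folded = foldFlowLines(text, '  ', 'flow', { lineWidth: 24, minContentWidth: 8 });
// Folds only at single spaces between non-space characters; every folded
// line after the first starts with the supplied indent.
console.log(folded);
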
diff --git a/node_modules/yaml/dist/stringify/stringify.d.ts b/node_modules/yaml/dist/stringify/stringify.d.ts
new file mode 100644
index 0000000..fe96889
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringify.d.ts
@@ -0,0 +1,20 @@
+import type { Document } from '../doc/Document.js';
+import type { Alias } from '../nodes/Alias.js';
+import type { ToStringOptions } from '../options.js';
+export declare type StringifyContext = {
+ actualString?: boolean;
+ allNullValues?: boolean;
+ anchors: Set<string>;
+ doc: Document;
+ forceBlockIndent?: boolean;
+ implicitKey?: boolean;
+ indent: string;
+ indentStep: string;
+ indentAtStart?: number;
+ inFlow: boolean | null;
+ inStringifyKey?: boolean;
+ options: Readonly<Required<Omit<ToStringOptions, 'collectionStyle' | 'indent'>>>;
+ resolvedAliases?: Set<Alias>;
+};
+export declare function createStringifyContext(doc: Document, options: ToStringOptions): StringifyContext;
+export declare function stringify(item: unknown, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringify.js b/node_modules/yaml/dist/stringify/stringify.js
new file mode 100644
index 0000000..b6fd122
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringify.js
@@ -0,0 +1,127 @@
+'use strict';
+
+var anchors = require('../doc/anchors.js');
+var Node = require('../nodes/Node.js');
+var stringifyComment = require('./stringifyComment.js');
+var stringifyString = require('./stringifyString.js');
+
+function createStringifyContext(doc, options) {
+ const opt = Object.assign({
+ blockQuote: true,
+ commentString: stringifyComment.stringifyComment,
+ defaultKeyType: null,
+ defaultStringType: 'PLAIN',
+ directives: null,
+ doubleQuotedAsJSON: false,
+ doubleQuotedMinMultiLineLength: 40,
+ falseStr: 'false',
+ indentSeq: true,
+ lineWidth: 80,
+ minContentWidth: 20,
+ nullStr: 'null',
+ simpleKeys: false,
+ singleQuote: null,
+ trueStr: 'true',
+ verifyAliasOrder: true
+ }, doc.schema.toStringOptions, options);
+ let inFlow;
+ switch (opt.collectionStyle) {
+ case 'block':
+ inFlow = false;
+ break;
+ case 'flow':
+ inFlow = true;
+ break;
+ default:
+ inFlow = null;
+ }
+ return {
+ anchors: new Set(),
+ doc,
+ indent: '',
+ indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ',
+ inFlow,
+ options: opt
+ };
+}
+function getTagObject(tags, item) {
+ var _a, _b, _c, _d;
+ if (item.tag) {
+ const match = tags.filter(t => t.tag === item.tag);
+ if (match.length > 0)
+ return (_a = match.find(t => t.format === item.format)) !== null && _a !== void 0 ? _a : match[0];
+ }
+ let tagObj = undefined;
+ let obj;
+ if (Node.isScalar(item)) {
+ obj = item.value;
+ const match = tags.filter(t => { var _a; return (_a = t.identify) === null || _a === void 0 ? void 0 : _a.call(t, obj); });
+ tagObj =
+ (_b = match.find(t => t.format === item.format)) !== null && _b !== void 0 ? _b : match.find(t => !t.format);
+ }
+ else {
+ obj = item;
+ tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
+ }
+ if (!tagObj) {
+ const name = (_d = (_c = obj === null || obj === void 0 ? void 0 : obj.constructor) === null || _c === void 0 ? void 0 : _c.name) !== null && _d !== void 0 ? _d : typeof obj;
+ throw new Error(`Tag not resolved for ${name} value`);
+ }
+ return tagObj;
+}
+// needs to be called before value stringifier to allow for circular anchor refs
+function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {
+ if (!doc.directives)
+ return '';
+ const props = [];
+ const anchor = (Node.isScalar(node) || Node.isCollection(node)) && node.anchor;
+ if (anchor && anchors.anchorIsValid(anchor)) {
+ anchors$1.add(anchor);
+ props.push(`&${anchor}`);
+ }
+ const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
+ if (tag)
+ props.push(doc.directives.tagString(tag));
+ return props.join(' ');
+}
+function stringify(item, ctx, onComment, onChompKeep) {
+ var _a, _b;
+ if (Node.isPair(item))
+ return item.toString(ctx, onComment, onChompKeep);
+ if (Node.isAlias(item)) {
+ if (ctx.doc.directives)
+ return item.toString(ctx);
+ if ((_a = ctx.resolvedAliases) === null || _a === void 0 ? void 0 : _a.has(item)) {
+ throw new TypeError(`Cannot stringify circular structure without alias nodes`);
+ }
+ else {
+ if (ctx.resolvedAliases)
+ ctx.resolvedAliases.add(item);
+ else
+ ctx.resolvedAliases = new Set([item]);
+ item = item.resolve(ctx.doc);
+ }
+ }
+ let tagObj = undefined;
+ const node = Node.isNode(item)
+ ? item
+ : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
+ if (!tagObj)
+ tagObj = getTagObject(ctx.doc.schema.tags, node);
+ const props = stringifyProps(node, tagObj, ctx);
+ if (props.length > 0)
+ ctx.indentAtStart = ((_b = ctx.indentAtStart) !== null && _b !== void 0 ? _b : 0) + props.length + 1;
+ const str = typeof tagObj.stringify === 'function'
+ ? tagObj.stringify(node, ctx, onComment, onChompKeep)
+ : Node.isScalar(node)
+ ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)
+ : node.toString(ctx, onComment, onChompKeep);
+ if (!props)
+ return str;
+ return Node.isScalar(node) || str[0] === '{' || str[0] === '['
+ ? `${props} ${str}`
+ : `${props}\n${ctx.indent}${str}`;
+}
+
+exports.createStringifyContext = createStringifyContext;
+exports.stringify = stringify;
diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.d.ts b/node_modules/yaml/dist/stringify/stringifyCollection.d.ts
new file mode 100644
index 0000000..207d703
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyCollection.d.ts
@@ -0,0 +1,17 @@
+import { Collection } from '../nodes/Collection.js';
+import { StringifyContext } from './stringify.js';
+interface StringifyCollectionOptions {
+ blockItemPrefix: string;
+ flowChars: {
+ start: '{';
+ end: '}';
+ } | {
+ start: '[';
+ end: ']';
+ };
+ itemIndent: string;
+ onChompKeep?: () => void;
+ onComment?: () => void;
+}
+export declare function stringifyCollection(collection: Readonly<Collection>, ctx: StringifyContext, options: StringifyCollectionOptions): string;
+export {};
diff --git a/node_modules/yaml/dist/stringify/stringifyCollection.js b/node_modules/yaml/dist/stringify/stringifyCollection.js
new file mode 100644
index 0000000..7fc848a
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyCollection.js
@@ -0,0 +1,154 @@
+'use strict';
+
+var Collection = require('../nodes/Collection.js');
+var Node = require('../nodes/Node.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyCollection(collection, ctx, options) {
+ var _a;
+ const flow = (_a = ctx.inFlow) !== null && _a !== void 0 ? _a : collection.flow;
+ const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
+ return stringify(collection, ctx, options);
+}
+function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
+ const { indent, options: { commentString } } = ctx;
+ const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
+ let chompKeep = false; // flag for the preceding node's status
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (Node.isNode(item)) {
+ if (!chompKeep && item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (Node.isPair(item)) {
+ const ik = Node.isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (!chompKeep && ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
+ }
+ }
+ chompKeep = false;
+ let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));
+ if (comment)
+ str += stringifyComment.lineComment(str, itemIndent, commentString(comment));
+ if (chompKeep && comment)
+ chompKeep = false;
+ lines.push(blockItemPrefix + str);
+ }
+ let str;
+ if (lines.length === 0) {
+ str = flowChars.start + flowChars.end;
+ }
+ else {
+ str = lines[0];
+ for (let i = 1; i < lines.length; ++i) {
+ const line = lines[i];
+ str += line ? `\n${indent}${line}` : '\n';
+ }
+ }
+ if (comment) {
+ str += '\n' + stringifyComment.indentComment(commentString(comment), indent);
+ if (onComment)
+ onComment();
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+}
+function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
+ const { indent, indentStep, options: { commentString } } = ctx;
+ itemIndent += indentStep;
+ const itemCtx = Object.assign({}, ctx, {
+ indent: itemIndent,
+ inFlow: true,
+ type: null
+ });
+ let reqNewline = false;
+ let linesAtValue = 0;
+ const lines = [];
+ for (let i = 0; i < items.length; ++i) {
+ const item = items[i];
+ let comment = null;
+ if (Node.isNode(item)) {
+ if (item.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, item.commentBefore, false);
+ if (item.comment)
+ comment = item.comment;
+ }
+ else if (Node.isPair(item)) {
+ const ik = Node.isNode(item.key) ? item.key : null;
+ if (ik) {
+ if (ik.spaceBefore)
+ lines.push('');
+ addCommentBefore(ctx, lines, ik.commentBefore, false);
+ if (ik.comment)
+ reqNewline = true;
+ }
+ const iv = Node.isNode(item.value) ? item.value : null;
+ if (iv) {
+ if (iv.comment)
+ comment = iv.comment;
+ if (iv.commentBefore)
+ reqNewline = true;
+ }
+ else if (item.value == null && ik && ik.comment) {
+ comment = ik.comment;
+ }
+ }
+ if (comment)
+ reqNewline = true;
+ let str = stringify.stringify(item, itemCtx, () => (comment = null));
+ if (i < items.length - 1)
+ str += ',';
+ if (comment)
+ str += stringifyComment.lineComment(str, itemIndent, commentString(comment));
+ if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
+ reqNewline = true;
+ lines.push(str);
+ linesAtValue = lines.length;
+ }
+ let str;
+ const { start, end } = flowChars;
+ if (lines.length === 0) {
+ str = start + end;
+ }
+ else {
+ if (!reqNewline) {
+ const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
+ reqNewline = len > Collection.Collection.maxFlowStringSingleLineLength;
+ }
+ if (reqNewline) {
+ str = start;
+ for (const line of lines)
+ str += line ? `\n${indentStep}${indent}${line}` : '\n';
+ str += `\n${indent}${end}`;
+ }
+ else {
+ str = `${start} ${lines.join(' ')} ${end}`;
+ }
+ }
+ if (comment) {
+        str += stringifyComment.lineComment(str, indent, commentString(comment));
+ if (onComment)
+ onComment();
+ }
+ return str;
+}
+function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
+ if (comment && chompKeep)
+ comment = comment.replace(/^\n+/, '');
+ if (comment) {
+ const ic = stringifyComment.indentComment(commentString(comment), indent);
+ lines.push(ic.trimStart()); // Avoid double indent on first line
+ }
+}
+
+exports.stringifyCollection = stringifyCollection;
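
Whether the block or flow form is used depends on ctx.inFlow, which createStringifyContext derives from the collectionStyle option (see stringify.js above). A minimal sketch of the two shapes, assuming the public YAML.stringify entry point and that option:

// Sketch only: assumes YAML.stringify and the documented collectionStyle option.
const YAML = require('yaml');

YAML.stringify({ list: [1, 2, 3] });
// 'list:\n  - 1\n  - 2\n  - 3\n'   (stringifyBlockCollection)

YAML.stringify({ list: [1, 2, 3] }, { collectionStyle: 'flow' });
// 'list: [ 1, 2, 3 ]\n'            (stringifyFlowCollection)
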
diff --git a/node_modules/yaml/dist/stringify/stringifyComment.d.ts b/node_modules/yaml/dist/stringify/stringifyComment.d.ts
new file mode 100644
index 0000000..9fcf48d
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyComment.d.ts
@@ -0,0 +1,10 @@
+/**
+ * Stringifies a comment.
+ *
+ * Empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+export declare const stringifyComment: (str: string) => string;
+export declare function indentComment(comment: string, indent: string): string;
+export declare const lineComment: (str: string, indent: string, comment: string) => string;
diff --git a/node_modules/yaml/dist/stringify/stringifyComment.js b/node_modules/yaml/dist/stringify/stringifyComment.js
new file mode 100644
index 0000000..26bf361
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyComment.js
@@ -0,0 +1,24 @@
+'use strict';
+
+/**
+ * Stringifies a comment.
+ *
+ * Empty comment lines are left empty,
+ * lines consisting of a single space are replaced by `#`,
+ * and all other lines are prefixed with a `#`.
+ */
+const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
+function indentComment(comment, indent) {
+ if (/^\n+$/.test(comment))
+ return comment.substring(1);
+ return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
+}
+const lineComment = (str, indent, comment) => str.endsWith('\n')
+ ? indentComment(comment, indent)
+ : comment.includes('\n')
+ ? '\n' + indentComment(comment, indent)
+ : (str.endsWith(' ') ? '' : ' ') + comment;
+
+exports.indentComment = indentComment;
+exports.lineComment = lineComment;
+exports.stringifyComment = stringifyComment;
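
The stringifyComment regexp is terse; a few concrete evaluations make the three documented cases visible (the one-liner is restated locally here, since it is not among the 'yaml/util' re-exports):

// Sketch only: the same one-liner as above, restated for illustration.
const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');

stringifyComment('hello\n\nworld'); // '#hello\n\n#world' (empty line left empty)
stringifyComment(' ');              // '#'                (single space becomes '#')
stringifyComment(' spaced');        // '# spaced'         (prefix added, space kept)
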
diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.d.ts b/node_modules/yaml/dist/stringify/stringifyDocument.d.ts
new file mode 100644
index 0000000..fb0633c
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyDocument.d.ts
@@ -0,0 +1,3 @@
+import { Document } from '../doc/Document.js';
+import { ToStringOptions } from '../options.js';
+export declare function stringifyDocument(doc: Readonly<Document>, options: ToStringOptions): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyDocument.js b/node_modules/yaml/dist/stringify/stringifyDocument.js
new file mode 100644
index 0000000..ead7df6
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyDocument.js
@@ -0,0 +1,88 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyDocument(doc, options) {
+ var _a;
+ const lines = [];
+ let hasDirectives = options.directives === true;
+ if (options.directives !== false && doc.directives) {
+ const dir = doc.directives.toString(doc);
+ if (dir) {
+ lines.push(dir);
+ hasDirectives = true;
+ }
+ else if (doc.directives.docStart)
+ hasDirectives = true;
+ }
+ if (hasDirectives)
+ lines.push('---');
+ const ctx = stringify.createStringifyContext(doc, options);
+ const { commentString } = ctx.options;
+ if (doc.commentBefore) {
+ if (lines.length !== 1)
+ lines.unshift('');
+ const cs = commentString(doc.commentBefore);
+ lines.unshift(stringifyComment.indentComment(cs, ''));
+ }
+ let chompKeep = false;
+ let contentComment = null;
+ if (doc.contents) {
+ if (Node.isNode(doc.contents)) {
+ if (doc.contents.spaceBefore && hasDirectives)
+ lines.push('');
+ if (doc.contents.commentBefore) {
+ const cs = commentString(doc.contents.commentBefore);
+ lines.push(stringifyComment.indentComment(cs, ''));
+ }
+ // top-level block scalars need to be indented if followed by a comment
+ ctx.forceBlockIndent = !!doc.comment;
+ contentComment = doc.contents.comment;
+ }
+ const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
+ let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);
+ if (contentComment)
+ body += stringifyComment.lineComment(body, '', commentString(contentComment));
+ if ((body[0] === '|' || body[0] === '>') &&
+ lines[lines.length - 1] === '---') {
+ // Top-level block scalars with a preceding doc marker ought to use the
+ // same line for their header.
+ lines[lines.length - 1] = `--- ${body}`;
+ }
+ else
+ lines.push(body);
+ }
+ else {
+ lines.push(stringify.stringify(doc.contents, ctx));
+ }
+ if ((_a = doc.directives) === null || _a === void 0 ? void 0 : _a.docEnd) {
+ if (doc.comment) {
+ const cs = commentString(doc.comment);
+ if (cs.includes('\n')) {
+ lines.push('...');
+ lines.push(stringifyComment.indentComment(cs, ''));
+ }
+ else {
+ lines.push(`... ${cs}`);
+ }
+ }
+ else {
+ lines.push('...');
+ }
+ }
+ else {
+ let dc = doc.comment;
+ if (dc && chompKeep)
+ dc = dc.replace(/^\n+/, '');
+ if (dc) {
+ if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
+ lines.push('');
+ lines.push(stringifyComment.indentComment(commentString(dc), ''));
+ }
+ }
+ return lines.join('\n') + '\n';
+}
+
+exports.stringifyDocument = stringifyDocument;
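
The '---' and '...' markers come from here rather than from the nodes: directives === true forces a document-start marker even when no directive lines are emitted. A minimal sketch, assuming the public YAML.stringify entry point and the documented directives option:

// Sketch only: assumes YAML.stringify and its `directives` option.
const YAML = require('yaml');

YAML.stringify({ a: 1 });
// 'a: 1\n'

YAML.stringify({ a: 1 }, { directives: true });
// '---\na: 1\n'  (document-start marker added, no directive lines needed)
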
diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.d.ts b/node_modules/yaml/dist/stringify/stringifyNumber.d.ts
new file mode 100644
index 0000000..3c14df1
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyNumber.d.ts
@@ -0,0 +1,2 @@
+import type { Scalar } from '../nodes/Scalar.js';
+export declare function stringifyNumber({ format, minFractionDigits, tag, value }: Scalar): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyNumber.js b/node_modules/yaml/dist/stringify/stringifyNumber.js
new file mode 100644
index 0000000..4118ff6
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyNumber.js
@@ -0,0 +1,26 @@
+'use strict';
+
+function stringifyNumber({ format, minFractionDigits, tag, value }) {
+ if (typeof value === 'bigint')
+ return String(value);
+ const num = typeof value === 'number' ? value : Number(value);
+ if (!isFinite(num))
+ return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
+ let n = JSON.stringify(value);
+ if (!format &&
+ minFractionDigits &&
+ (!tag || tag === 'tag:yaml.org,2002:float') &&
+ /^\d/.test(n)) {
+ let i = n.indexOf('.');
+ if (i < 0) {
+ i = n.length;
+ n += '.';
+ }
+ let d = minFractionDigits - (n.length - i - 1);
+ while (d-- > 0)
+ n += '0';
+ }
+ return n;
+}
+
+exports.stringifyNumber = stringifyNumber;
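
stringifyNumber is the shared scalar stringifier behind the int, float and timestamp tags above, and it is re-exported from 'yaml/util'. It only reads the four listed Scalar fields, so a plain object is enough to illustrate its edge cases (assuming that subpath export):

// Sketch only: assumes the 'yaml/util' re-export; plain objects stand in for Scalars.
const { stringifyNumber } = require('yaml/util');

stringifyNumber({ value: NaN });                     // '.nan'
stringifyNumber({ value: -Infinity });               // '-.inf'
stringifyNumber({ value: 42n });                     // '42'   (bigint short-circuit)
stringifyNumber({ value: 3, minFractionDigits: 2 }); // '3.00' (zero-padded)
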
diff --git a/node_modules/yaml/dist/stringify/stringifyPair.d.ts b/node_modules/yaml/dist/stringify/stringifyPair.d.ts
new file mode 100644
index 0000000..c512149
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyPair.d.ts
@@ -0,0 +1,3 @@
+import type { Pair } from '../nodes/Pair.js';
+import { StringifyContext } from './stringify.js';
+export declare function stringifyPair({ key, value }: Readonly<Pair>, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyPair.js b/node_modules/yaml/dist/stringify/stringifyPair.js
new file mode 100644
index 0000000..f48a053
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyPair.js
@@ -0,0 +1,127 @@
+'use strict';
+
+var Node = require('../nodes/Node.js');
+var Scalar = require('../nodes/Scalar.js');
+var stringify = require('./stringify.js');
+var stringifyComment = require('./stringifyComment.js');
+
+function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
+ const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
+ let keyComment = (Node.isNode(key) && key.comment) || null;
+ if (simpleKeys) {
+ if (keyComment) {
+ throw new Error('With simple keys, key nodes cannot have comments');
+ }
+ if (Node.isCollection(key)) {
+ const msg = 'With simple keys, collection cannot be used as a key value';
+ throw new Error(msg);
+ }
+ }
+ let explicitKey = !simpleKeys &&
+ (!key ||
+ (keyComment && value == null && !ctx.inFlow) ||
+ Node.isCollection(key) ||
+ (Node.isScalar(key)
+ ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL
+ : typeof key === 'object'));
+ ctx = Object.assign({}, ctx, {
+ allNullValues: false,
+ implicitKey: !explicitKey && (simpleKeys || !allNullValues),
+ indent: indent + indentStep
+ });
+ let keyCommentDone = false;
+ let chompKeep = false;
+ let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
+ if (!explicitKey && !ctx.inFlow && str.length > 1024) {
+ if (simpleKeys)
+ throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
+ explicitKey = true;
+ }
+ if (ctx.inFlow) {
+ if (allNullValues || value == null) {
+ if (keyCommentDone && onComment)
+ onComment();
+ return str === '' ? '?' : explicitKey ? `? ${str}` : str;
+ }
+ }
+ else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
+ str = `? ${str}`;
+ if (keyComment && !keyCommentDone) {
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ else if (chompKeep && onChompKeep)
+ onChompKeep();
+ return str;
+ }
+ if (keyCommentDone)
+ keyComment = null;
+ if (explicitKey) {
+ if (keyComment)
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ str = `? ${str}\n${indent}:`;
+ }
+ else {
+ str = `${str}:`;
+ if (keyComment)
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));
+ }
+ let vcb = '';
+ let valueComment = null;
+ if (Node.isNode(value)) {
+ if (value.spaceBefore)
+ vcb = '\n';
+ if (value.commentBefore) {
+ const cs = commentString(value.commentBefore);
+ vcb += `\n${stringifyComment.indentComment(cs, ctx.indent)}`;
+ }
+ valueComment = value.comment;
+ }
+ else if (value && typeof value === 'object') {
+ value = doc.createNode(value);
+ }
+ ctx.implicitKey = false;
+ if (!explicitKey && !keyComment && Node.isScalar(value))
+ ctx.indentAtStart = str.length + 1;
+ chompKeep = false;
+ if (!indentSeq &&
+ indentStep.length >= 2 &&
+ !ctx.inFlow &&
+ !explicitKey &&
+ Node.isSeq(value) &&
+ !value.flow &&
+ !value.tag &&
+ !value.anchor) {
+ // If indentSeq === false, consider '- ' as part of indentation where possible
+ ctx.indent = ctx.indent.substr(2);
+ }
+ let valueCommentDone = false;
+ const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
+ let ws = ' ';
+ if (vcb || keyComment) {
+ if (valueStr === '' && !ctx.inFlow)
+ ws = vcb === '\n' ? '\n\n' : vcb;
+ else
+ ws = `${vcb}\n${ctx.indent}`;
+ }
+ else if (!explicitKey && Node.isCollection(value)) {
+ const flow = valueStr[0] === '[' || valueStr[0] === '{';
+ if (!flow || valueStr.includes('\n'))
+ ws = `\n${ctx.indent}`;
+ }
+ else if (valueStr === '' || valueStr[0] === '\n')
+ ws = '';
+ str += ws + valueStr;
+ if (ctx.inFlow) {
+ if (valueCommentDone && onComment)
+ onComment();
+ }
+ else if (valueComment && !valueCommentDone) {
+ str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));
+ }
+ else if (chompKeep && onChompKeep) {
+ onChompKeep();
+ }
+ return str;
+}
+
+exports.stringifyPair = stringifyPair;
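
The explicitKey branch is what produces '? ' entries: any collection used as a key, or a block-scalar key, is forced into the explicit form. A minimal sketch, assuming the public YAML.Document API (not shown in this diff):

// Sketch only: assumes the public YAML.Document constructor.
const YAML = require('yaml');

const doc = new YAML.Document(new Map([[[1, 2], 'value']]));
String(doc);
// '? - 1\n  - 2\n: value\n'  (a sequence key forces the explicit-key form)
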
diff --git a/node_modules/yaml/dist/stringify/stringifyString.d.ts b/node_modules/yaml/dist/stringify/stringifyString.d.ts
new file mode 100644
index 0000000..a9904b9
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyString.d.ts
@@ -0,0 +1,3 @@
+import { Scalar } from '../nodes/Scalar.js';
+import type { StringifyContext } from './stringify.js';
+export declare function stringifyString(item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string;
diff --git a/node_modules/yaml/dist/stringify/stringifyString.js b/node_modules/yaml/dist/stringify/stringifyString.js
new file mode 100644
index 0000000..8a41503
--- /dev/null
+++ b/node_modules/yaml/dist/stringify/stringifyString.js
@@ -0,0 +1,316 @@
+'use strict';
+
+var Scalar = require('../nodes/Scalar.js');
+var foldFlowLines = require('./foldFlowLines.js');
+
+const getFoldOptions = (ctx) => ({
+ indentAtStart: ctx.indentAtStart,
+ lineWidth: ctx.options.lineWidth,
+ minContentWidth: ctx.options.minContentWidth
+});
+// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
+// presume that's starting a new document.
+const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
+function lineLengthOverLimit(str, lineWidth, indentLength) {
+ if (!lineWidth || lineWidth < 0)
+ return false;
+ const limit = lineWidth - indentLength;
+ const strLen = str.length;
+ if (strLen <= limit)
+ return false;
+ for (let i = 0, start = 0; i < strLen; ++i) {
+ if (str[i] === '\n') {
+ if (i - start > limit)
+ return true;
+ start = i + 1;
+ if (strLen - start <= limit)
+ return false;
+ }
+ }
+ return true;
+}
+function doubleQuotedString(value, ctx) {
+ const json = JSON.stringify(value);
+ if (ctx.options.doubleQuotedAsJSON)
+ return json;
+ const { implicitKey } = ctx;
+ const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ let str = '';
+ let start = 0;
+ for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
+ if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
+ // space before newline needs to be escaped to not be folded
+ str += json.slice(start, i) + '\\ ';
+ i += 1;
+ start = i;
+ ch = '\\';
+ }
+ if (ch === '\\')
+ switch (json[i + 1]) {
+ case 'u':
+ {
+ str += json.slice(start, i);
+ const code = json.substr(i + 2, 4);
+ switch (code) {
+ case '0000':
+ str += '\\0';
+ break;
+ case '0007':
+ str += '\\a';
+ break;
+ case '000b':
+ str += '\\v';
+ break;
+ case '001b':
+ str += '\\e';
+ break;
+ case '0085':
+ str += '\\N';
+ break;
+ case '00a0':
+ str += '\\_';
+ break;
+ case '2028':
+ str += '\\L';
+ break;
+ case '2029':
+ str += '\\P';
+ break;
+ default:
+ if (code.substr(0, 2) === '00')
+ str += '\\x' + code.substr(2);
+ else
+ str += json.substr(i, 6);
+ }
+ i += 5;
+ start = i + 1;
+ }
+ break;
+ case 'n':
+ if (implicitKey ||
+ json[i + 2] === '"' ||
+ json.length < minMultiLineLength) {
+ i += 1;
+ }
+ else {
+ // folding will eat first newline
+ str += json.slice(start, i) + '\n\n';
+ while (json[i + 2] === '\\' &&
+ json[i + 3] === 'n' &&
+ json[i + 4] !== '"') {
+ str += '\n';
+ i += 2;
+ }
+ str += indent;
+ // space after newline needs to be escaped to not be folded
+ if (json[i + 2] === ' ')
+ str += '\\';
+ i += 1;
+ start = i + 1;
+ }
+ break;
+ default:
+ i += 1;
+ }
+ }
+ str = start ? str + json.slice(start) : json;
+ return implicitKey
+ ? str
+ : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx));
+}
+function singleQuotedString(value, ctx) {
+ if (ctx.options.singleQuote === false ||
+ (ctx.implicitKey && value.includes('\n')) ||
+ /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
+ )
+ return doubleQuotedString(value, ctx);
+ const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
+ const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
+ return ctx.implicitKey
+ ? res
+ : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+}
+function quotedString(value, ctx) {
+ const { singleQuote } = ctx.options;
+ let qs;
+ if (singleQuote === false)
+ qs = doubleQuotedString;
+ else {
+ const hasDouble = value.includes('"');
+ const hasSingle = value.includes("'");
+ if (hasDouble && !hasSingle)
+ qs = singleQuotedString;
+ else if (hasSingle && !hasDouble)
+ qs = doubleQuotedString;
+ else
+ qs = singleQuote ? singleQuotedString : doubleQuotedString;
+ }
+ return qs(value, ctx);
+}
+function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
+ const { blockQuote, commentString, lineWidth } = ctx.options;
+ // 1. Block can't end in whitespace unless the last line is non-empty.
+ // 2. Strings consisting of only whitespace are best rendered explicitly.
+ if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
+ return quotedString(value, ctx);
+ }
+ const indent = ctx.indent ||
+ (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
+ const literal = blockQuote === 'literal'
+ ? true
+ : blockQuote === 'folded' || type === Scalar.Scalar.BLOCK_FOLDED
+ ? false
+ : type === Scalar.Scalar.BLOCK_LITERAL
+ ? true
+ : !lineLengthOverLimit(value, lineWidth, indent.length);
+ if (!value)
+ return literal ? '|\n' : '>\n';
+ // determine chomping from whitespace at value end
+ let chomp;
+ let endStart;
+ for (endStart = value.length; endStart > 0; --endStart) {
+ const ch = value[endStart - 1];
+ if (ch !== '\n' && ch !== '\t' && ch !== ' ')
+ break;
+ }
+ let end = value.substring(endStart);
+ const endNlPos = end.indexOf('\n');
+ if (endNlPos === -1) {
+ chomp = '-'; // strip
+ }
+ else if (value === end || endNlPos !== end.length - 1) {
+ chomp = '+'; // keep
+ if (onChompKeep)
+ onChompKeep();
+ }
+ else {
+ chomp = ''; // clip
+ }
+ if (end) {
+ value = value.slice(0, -end.length);
+ if (end[end.length - 1] === '\n')
+ end = end.slice(0, -1);
+ end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
+ }
+ // determine indent indicator from whitespace at value start
+ let startWithSpace = false;
+ let startEnd;
+ let startNlPos = -1;
+ for (startEnd = 0; startEnd < value.length; ++startEnd) {
+ const ch = value[startEnd];
+ if (ch === ' ')
+ startWithSpace = true;
+ else if (ch === '\n')
+ startNlPos = startEnd;
+ else
+ break;
+ }
+ let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
+ if (start) {
+ value = value.substring(start.length);
+ start = start.replace(/\n+/g, `$&${indent}`);
+ }
+ const indentSize = indent ? '2' : '1'; // root is at -1
+ let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
+ if (comment) {
+ header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
+ if (onComment)
+ onComment();
+ }
+ if (literal) {
+ value = value.replace(/\n+/g, `$&${indent}`);
+ return `${header}\n${indent}${start}${value}${end}`;
+ }
+ value = value
+ .replace(/\n+/g, '\n$&')
+ .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
+ // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent
+ .replace(/\n+/g, `$&${indent}`);
+ const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx));
+ return `${header}\n${indent}${body}`;
+}
+function plainString(item, ctx, onComment, onChompKeep) {
+ const { type, value } = item;
+ const { actualString, implicitKey, indent, inFlow } = ctx;
+ if ((implicitKey && /[\n[\]{},]/.test(value)) ||
+ (inFlow && /[[\]{},]/.test(value))) {
+ return quotedString(value, ctx);
+ }
+ if (!value ||
+ /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
+ // not allowed:
+ // - empty string, '-' or '?'
+ // - start with an indicator character (except [?:-]) or /[?-] /
+ // - '\n ', ': ' or ' \n' anywhere
+ // - '#' not preceded by a non-space char
+ // - end with ' ' or ':'
+ return implicitKey || inFlow || !value.includes('\n')
+ ? quotedString(value, ctx)
+ : blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (!implicitKey &&
+ !inFlow &&
+ type !== Scalar.Scalar.PLAIN &&
+ value.includes('\n')) {
+ // Where allowed & type not set explicitly, prefer block style for multiline strings
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ if (indent === '' && containsDocumentMarker(value)) {
+ ctx.forceBlockIndent = true;
+ return blockString(item, ctx, onComment, onChompKeep);
+ }
+ const str = value.replace(/\n+/g, `$&\n${indent}`);
+ // Verify that output will be parsed as a string, as e.g. plain numbers and
+ // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
+ // and others in v1.1.
+ if (actualString) {
+ const test = (tag) => { var _a; return tag.default && tag.tag !== 'tag:yaml.org,2002:str' && ((_a = tag.test) === null || _a === void 0 ? void 0 : _a.test(str)); };
+ const { compat, tags } = ctx.doc.schema;
+ if (tags.some(test) || (compat === null || compat === void 0 ? void 0 : compat.some(test)))
+ return quotedString(value, ctx);
+ }
+ return implicitKey
+ ? str
+ : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx));
+}
+function stringifyString(item, ctx, onComment, onChompKeep) {
+ const { implicitKey, inFlow } = ctx;
+ const ss = typeof item.value === 'string'
+ ? item
+ : Object.assign({}, item, { value: String(item.value) });
+ let { type } = item;
+ if (type !== Scalar.Scalar.QUOTE_DOUBLE) {
+ // force double quotes on control characters & unpaired surrogates
+ if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
+ type = Scalar.Scalar.QUOTE_DOUBLE;
+ }
+ const _stringify = (_type) => {
+ switch (_type) {
+ case Scalar.Scalar.BLOCK_FOLDED:
+ case Scalar.Scalar.BLOCK_LITERAL:
+ return implicitKey || inFlow
+ ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
+ : blockString(ss, ctx, onComment, onChompKeep);
+ case Scalar.Scalar.QUOTE_DOUBLE:
+ return doubleQuotedString(ss.value, ctx);
+ case Scalar.Scalar.QUOTE_SINGLE:
+ return singleQuotedString(ss.value, ctx);
+ case Scalar.Scalar.PLAIN:
+ return plainString(ss, ctx, onComment, onChompKeep);
+ default:
+ return null;
+ }
+ };
+ let res = _stringify(type);
+ if (res === null) {
+ const { defaultKeyType, defaultStringType } = ctx.options;
+ const t = (implicitKey && defaultKeyType) || defaultStringType;
+ res = _stringify(t);
+ if (res === null)
+ throw new Error(`Unsupported default string type ${t}`);
+ }
+ return res;
+}
+
+exports.stringifyString = stringifyString;
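
stringifyString picks between plain, quoted and block styles from the scalar type, the context and the string content; the effect is easiest to see through whole-document output. A minimal sketch, assuming the public YAML.stringify entry point and the blockQuote option that createStringifyContext defaults to true above:

// Sketch only: assumes YAML.stringify; blockQuote is the option defaulted in stringify.js.
const YAML = require('yaml');

YAML.stringify({ s: 'one line' });
// 's: one line\n'                (plainString)

YAML.stringify({ s: 'line 1\nline 2\n' });
// 's: |\n  line 1\n  line 2\n'   (blockString, literal style)

YAML.stringify({ s: 'line 1\nline 2\n' }, { blockQuote: false });
// 's: "line 1\\nline 2\\n"\n'    (doubleQuotedString fallback)
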
diff --git a/node_modules/yaml/dist/test-events.d.ts b/node_modules/yaml/dist/test-events.d.ts
new file mode 100644
index 0000000..d1a2348
--- /dev/null
+++ b/node_modules/yaml/dist/test-events.d.ts
@@ -0,0 +1,4 @@
+export declare function testEvents(src: string): {
+ events: string[];
+ error: unknown;
+};
diff --git a/node_modules/yaml/dist/test-events.js b/node_modules/yaml/dist/test-events.js
new file mode 100644
index 0000000..3927b30
--- /dev/null
+++ b/node_modules/yaml/dist/test-events.js
@@ -0,0 +1,135 @@
+'use strict';
+
+var Node = require('./nodes/Node.js');
+var publicApi = require('./public-api.js');
+var visit = require('./visit.js');
+
+const scalarChar = {
+ BLOCK_FOLDED: '>',
+ BLOCK_LITERAL: '|',
+ PLAIN: ':',
+ QUOTE_DOUBLE: '"',
+ QUOTE_SINGLE: "'"
+};
+function anchorExists(doc, anchor) {
+ let found = false;
+ visit.visit(doc, {
+ Value(_key, node) {
+ if (node.anchor === anchor) {
+ found = true;
+ return visit.visit.BREAK;
+ }
+ }
+ });
+ return found;
+}
+// test harness for yaml-test-suite event tests
+function testEvents(src) {
+ var _a;
+ const docs = publicApi.parseAllDocuments(src);
+ const errDoc = docs.find(doc => doc.errors.length > 0);
+ const error = errDoc ? errDoc.errors[0].message : null;
+ const events = ['+STR'];
+ try {
+ for (let i = 0; i < docs.length; ++i) {
+ const doc = docs[i];
+ let root = doc.contents;
+ if (Array.isArray(root))
+ root = root[0];
+ const [rootStart] = doc.range || [0];
+ const error = doc.errors[0];
+ if (error && (!error.pos || error.pos[0] < rootStart))
+ throw new Error();
+ let docStart = '+DOC';
+ if (doc.directives.docStart)
+ docStart += ' ---';
+ else if (doc.contents &&
+ doc.contents.range[2] === doc.contents.range[0] &&
+ !doc.contents.anchor &&
+ !doc.contents.tag)
+ continue;
+ events.push(docStart);
+ addEvents(events, doc, (_a = error === null || error === void 0 ? void 0 : error.pos[0]) !== null && _a !== void 0 ? _a : -1, root);
+ let docEnd = '-DOC';
+ if (doc.directives.docEnd)
+ docEnd += ' ...';
+ events.push(docEnd);
+ }
+ }
+ catch (e) {
+ return { events, error: error !== null && error !== void 0 ? error : e };
+ }
+ events.push('-STR');
+ return { events, error };
+}
+function addEvents(events, doc, errPos, node) {
+ if (!node) {
+ events.push('=VAL :');
+ return;
+ }
+ if (errPos !== -1 && Node.isNode(node) && node.range[0] >= errPos)
+ throw new Error();
+ let props = '';
+ let anchor = Node.isScalar(node) || Node.isCollection(node) ? node.anchor : undefined;
+ if (anchor) {
+ if (/\d$/.test(anchor)) {
+ const alt = anchor.replace(/\d$/, '');
+ if (anchorExists(doc, alt))
+ anchor = alt;
+ }
+ props = ` &${anchor}`;
+ }
+ if (Node.isNode(node) && node.tag)
+ props += ` <${node.tag}>`;
+ if (Node.isMap(node)) {
+ const ev = node.flow ? '+MAP {}' : '+MAP';
+ events.push(`${ev}${props}`);
+ node.items.forEach(({ key, value }) => {
+ addEvents(events, doc, errPos, key);
+ addEvents(events, doc, errPos, value);
+ });
+ events.push('-MAP');
+ }
+ else if (Node.isSeq(node)) {
+ const ev = node.flow ? '+SEQ []' : '+SEQ';
+ events.push(`${ev}${props}`);
+ node.items.forEach(item => {
+ addEvents(events, doc, errPos, item);
+ });
+ events.push('-SEQ');
+ }
+ else if (Node.isPair(node)) {
+ events.push(`+MAP${props}`);
+ addEvents(events, doc, errPos, node.key);
+ addEvents(events, doc, errPos, node.value);
+ events.push('-MAP');
+ }
+ else if (Node.isAlias(node)) {
+ let alias = node.source;
+ if (alias && /\d$/.test(alias)) {
+ const alt = alias.replace(/\d$/, '');
+ if (anchorExists(doc, alt))
+ alias = alt;
+ }
+ events.push(`=ALI${props} *${alias}`);
+ }
+ else {
+ const scalar = scalarChar[String(node.type)];
+ if (!scalar)
+ throw new Error(`Unexpected node type ${node.type}`);
+ const value = node.source
+ .replace(/\\/g, '\\\\')
+ .replace(/\0/g, '\\0')
+ .replace(/\x07/g, '\\a')
+ .replace(/\x08/g, '\\b')
+ .replace(/\t/g, '\\t')
+ .replace(/\n/g, '\\n')
+ .replace(/\v/g, '\\v')
+ .replace(/\f/g, '\\f')
+ .replace(/\r/g, '\\r')
+ .replace(/\x1b/g, '\\e');
+ events.push(`=VAL${props} ${scalar}${value}`);
+ }
+}
+
+exports.testEvents = testEvents;
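
testEvents is an internal harness rather than part of the public API, so the sketch below requires the dist file by a relative path (the path is an assumption about the project layout); the expected event stream follows the yaml-test-suite format produced by addEvents above.

    // Sketch: running the event harness on a small document.
    const { testEvents } = require('./node_modules/yaml/dist/test-events.js');

    const { events, error } = testEvents('a: 1\nb: two\n');
    // error is null and events lists the yaml-test-suite stream, roughly:
    //   +STR
    //   +DOC
    //   +MAP
    //   =VAL :a
    //   =VAL :1
    //   =VAL :b
    //   =VAL :two
    //   -MAP
    //   -DOC
    //   -STR
    console.log(events.join('\n'));
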
diff --git a/node_modules/yaml/dist/util.d.ts b/node_modules/yaml/dist/util.d.ts
new file mode 100644
index 0000000..f1012d7
--- /dev/null
+++ b/node_modules/yaml/dist/util.d.ts
@@ -0,0 +1,9 @@
+export { debug, LogLevelId, warn } from './log.js';
+export { findPair } from './nodes/YAMLMap.js';
+export { toJS, ToJSContext } from './nodes/toJS.js';
+export { map as mapTag } from './schema/common/map.js';
+export { seq as seqTag } from './schema/common/seq.js';
+export { string as stringTag } from './schema/common/string.js';
+export { foldFlowLines } from './stringify/foldFlowLines.js';
+export { stringifyNumber } from './stringify/stringifyNumber.js';
+export { stringifyString } from './stringify/stringifyString.js';
diff --git a/node_modules/yaml/dist/util.js b/node_modules/yaml/dist/util.js
new file mode 100644
index 0000000..9a38c10
--- /dev/null
+++ b/node_modules/yaml/dist/util.js
@@ -0,0 +1,24 @@
+'use strict';
+
+var log = require('./log.js');
+var YAMLMap = require('./nodes/YAMLMap.js');
+var toJS = require('./nodes/toJS.js');
+var map = require('./schema/common/map.js');
+var seq = require('./schema/common/seq.js');
+var string = require('./schema/common/string.js');
+var foldFlowLines = require('./stringify/foldFlowLines.js');
+var stringifyNumber = require('./stringify/stringifyNumber.js');
+var stringifyString = require('./stringify/stringifyString.js');
+
+
+
+exports.debug = log.debug;
+exports.warn = log.warn;
+exports.findPair = YAMLMap.findPair;
+exports.toJS = toJS.toJS;
+exports.mapTag = map.map;
+exports.seqTag = seq.seq;
+exports.stringTag = string.string;
+exports.foldFlowLines = foldFlowLines.foldFlowLines;
+exports.stringifyNumber = stringifyNumber.stringifyNumber;
+exports.stringifyString = stringifyString.stringifyString;
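
A short sketch using two of the helpers re-exported above. Assumption: the 'yaml/util' subpath resolves to this dist/util.js file (the package.json mapping is not shown here); otherwise the file can be required directly by path.

    const YAML = require('yaml');
    const { findPair, stringifyNumber } = require('yaml/util');

    const doc = YAML.parseDocument('a: 1\nb: 2\n');
    const pair = findPair(doc.contents.items, 'b');  // the Pair whose key is 'b'
    console.log(pair.value.value);                   // 2

    // stringifyNumber formats a numeric scalar-like object for output
    console.log(stringifyNumber({ value: 0.1 }));    // '0.1'
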
diff --git a/node_modules/yaml/dist/visit.d.ts b/node_modules/yaml/dist/visit.d.ts
new file mode 100644
index 0000000..7926c00
--- /dev/null
+++ b/node_modules/yaml/dist/visit.d.ts
@@ -0,0 +1,102 @@
+import type { Document } from './doc/Document.js';
+import type { Alias } from './nodes/Alias.js';
+import { Node } from './nodes/Node.js';
+import type { Pair } from './nodes/Pair.js';
+import type { Scalar } from './nodes/Scalar.js';
+import type { YAMLMap } from './nodes/YAMLMap.js';
+import type { YAMLSeq } from './nodes/YAMLSeq.js';
+export declare type visitorFn<T> = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair;
+export declare type visitor = visitorFn<unknown> | {
+ Alias?: visitorFn<Alias>;
+ Collection?: visitorFn<YAMLMap | YAMLSeq>;
+ Map?: visitorFn<YAMLMap>;
+ Node?: visitorFn<Alias | Scalar | YAMLMap | YAMLSeq>;
+ Pair?: visitorFn<Pair>;
+ Scalar?: visitorFn<Scalar>;
+ Seq?: visitorFn<YAMLSeq>;
+ Value?: visitorFn<Scalar | YAMLMap | YAMLSeq>;
+};
+export declare type asyncVisitorFn<T> = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair | Promise<void | symbol | number | Node | Pair>;
+export declare type asyncVisitor = asyncVisitorFn<unknown> | {
+ Alias?: asyncVisitorFn<Alias>;
+ Collection?: asyncVisitorFn<YAMLMap | YAMLSeq>;
+ Map?: asyncVisitorFn<YAMLMap>;
+ Node?: asyncVisitorFn<Alias | Scalar | YAMLMap | YAMLSeq>;
+ Pair?: asyncVisitorFn<Pair>;
+ Scalar?: asyncVisitorFn<Scalar>;
+ Seq?: asyncVisitorFn<YAMLSeq>;
+ Value?: asyncVisitorFn<Scalar | YAMLMap | YAMLSeq>;
+};
+/**
+ * Apply a visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+export declare function visit(node: Node | Document | null, visitor: visitor): void;
+export declare namespace visit {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+}
+/**
+ * Apply an async visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `Promise`: Must resolve to one of the following values
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+export declare function visitAsync(node: Node | Document | null, visitor: asyncVisitor): Promise<void>;
+export declare namespace visitAsync {
+ var BREAK: symbol;
+ var SKIP: symbol;
+ var REMOVE: symbol;
+}
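
A brief usage sketch for the traversal controls documented above, using the visitor-object form and the REMOVE symbol. Assumption: the package entry point re-exports visit, parseDocument and isScalar; exact output formatting may differ.

    const YAML = require('yaml');

    const doc = YAML.parseDocument('a: 1\nb: [2, 3]\nc: 4\n');
    YAML.visit(doc, {
      // Scalar visitor: double every numeric value in place
      Scalar(key, node) {
        if (typeof node.value === 'number') node.value *= 2;
      },
      // Pair visitor: drop the pair keyed 'c'; its key and value are then skipped
      Pair(key, pair) {
        if (YAML.isScalar(pair.key) && pair.key.value === 'c') return YAML.visit.REMOVE;
      }
    });
    console.log(String(doc)); // 'c' is gone and the remaining numbers are doubled
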
diff --git a/node_modules/yaml/dist/visit.js b/node_modules/yaml/dist/visit.js
new file mode 100644
index 0000000..aba95b8
--- /dev/null
+++ b/node_modules/yaml/dist/visit.js
@@ -0,0 +1,237 @@
+'use strict';
+
+var Node = require('./nodes/Node.js');
+
+const BREAK = Symbol('break visit');
+const SKIP = Symbol('skip children');
+const REMOVE = Symbol('remove node');
+/**
+ * Apply a visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+function visit(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (Node.isDocument(node)) {
+ const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ visit_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visit.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visit.SKIP = SKIP;
+/** Remove the current node */
+visit.REMOVE = REMOVE;
+function visit_(key, node, visitor, path) {
+ const ctrl = callVisitor(key, node, visitor, path);
+ if (Node.isNode(ctrl) || Node.isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visit_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (Node.isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = visit_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (Node.isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = visit_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = visit_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+/**
+ * Apply an async visitor to an AST node or document.
+ *
+ * Walks through the tree (depth-first) starting from `node`, calling a
+ * `visitor` function with three arguments:
+ * - `key`: For sequence values and map `Pair`, the node's index in the
+ * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
+ * `null` for the root node.
+ * - `node`: The current node.
+ * - `path`: The ancestry of the current node.
+ *
+ * The return value of the visitor may be used to control the traversal:
+ * - `Promise`: Must resolve to one of the following values
+ * - `undefined` (default): Do nothing and continue
+ * - `visit.SKIP`: Do not visit the children of this node, continue with next
+ * sibling
+ * - `visit.BREAK`: Terminate traversal completely
+ * - `visit.REMOVE`: Remove the current node, then continue with the next one
+ * - `Node`: Replace the current node, then continue by visiting it
+ * - `number`: While iterating the items of a sequence or map, set the index
+ * of the next step. This is useful especially if the index of the current
+ * node has changed.
+ *
+ * If `visitor` is a single function, it will be called with all values
+ * encountered in the tree, including e.g. `null` values. Alternatively,
+ * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
+ * `Alias` and `Scalar` node. To define the same visitor function for more than
+ * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
+ * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
+ * specific defined one will be used for each node.
+ */
+async function visitAsync(node, visitor) {
+ const visitor_ = initVisitor(visitor);
+ if (Node.isDocument(node)) {
+ const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
+ if (cd === REMOVE)
+ node.contents = null;
+ }
+ else
+ await visitAsync_(null, node, visitor_, Object.freeze([]));
+}
+// Without the `as symbol` casts, TS declares these in the `visit`
+// namespace using `var`, but then complains about that because
+// `unique symbol` must be `const`.
+/** Terminate visit traversal completely */
+visitAsync.BREAK = BREAK;
+/** Do not visit the children of the current node */
+visitAsync.SKIP = SKIP;
+/** Remove the current node */
+visitAsync.REMOVE = REMOVE;
+async function visitAsync_(key, node, visitor, path) {
+ const ctrl = await callVisitor(key, node, visitor, path);
+ if (Node.isNode(ctrl) || Node.isPair(ctrl)) {
+ replaceNode(key, path, ctrl);
+ return visitAsync_(key, ctrl, visitor, path);
+ }
+ if (typeof ctrl !== 'symbol') {
+ if (Node.isCollection(node)) {
+ path = Object.freeze(path.concat(node));
+ for (let i = 0; i < node.items.length; ++i) {
+ const ci = await visitAsync_(i, node.items[i], visitor, path);
+ if (typeof ci === 'number')
+ i = ci - 1;
+ else if (ci === BREAK)
+ return BREAK;
+ else if (ci === REMOVE) {
+ node.items.splice(i, 1);
+ i -= 1;
+ }
+ }
+ }
+ else if (Node.isPair(node)) {
+ path = Object.freeze(path.concat(node));
+ const ck = await visitAsync_('key', node.key, visitor, path);
+ if (ck === BREAK)
+ return BREAK;
+ else if (ck === REMOVE)
+ node.key = null;
+ const cv = await visitAsync_('value', node.value, visitor, path);
+ if (cv === BREAK)
+ return BREAK;
+ else if (cv === REMOVE)
+ node.value = null;
+ }
+ }
+ return ctrl;
+}
+function initVisitor(visitor) {
+ if (typeof visitor === 'object' &&
+ (visitor.Collection || visitor.Node || visitor.Value)) {
+ return Object.assign({
+ Alias: visitor.Node,
+ Map: visitor.Node,
+ Scalar: visitor.Node,
+ Seq: visitor.Node
+ }, visitor.Value && {
+ Map: visitor.Value,
+ Scalar: visitor.Value,
+ Seq: visitor.Value
+ }, visitor.Collection && {
+ Map: visitor.Collection,
+ Seq: visitor.Collection
+ }, visitor);
+ }
+ return visitor;
+}
+function callVisitor(key, node, visitor, path) {
+ var _a, _b, _c, _d, _e;
+ if (typeof visitor === 'function')
+ return visitor(key, node, path);
+ if (Node.isMap(node))
+ return (_a = visitor.Map) === null || _a === void 0 ? void 0 : _a.call(visitor, key, node, path);
+ if (Node.isSeq(node))
+ return (_b = visitor.Seq) === null || _b === void 0 ? void 0 : _b.call(visitor, key, node, path);
+ if (Node.isPair(node))
+ return (_c = visitor.Pair) === null || _c === void 0 ? void 0 : _c.call(visitor, key, node, path);
+ if (Node.isScalar(node))
+ return (_d = visitor.Scalar) === null || _d === void 0 ? void 0 : _d.call(visitor, key, node, path);
+ if (Node.isAlias(node))
+ return (_e = visitor.Alias) === null || _e === void 0 ? void 0 : _e.call(visitor, key, node, path);
+ return undefined;
+}
+function replaceNode(key, path, node) {
+ const parent = path[path.length - 1];
+ if (Node.isCollection(parent)) {
+ parent.items[key] = node;
+ }
+ else if (Node.isPair(parent)) {
+ if (key === 'key')
+ parent.key = node;
+ else
+ parent.value = node;
+ }
+ else if (Node.isDocument(parent)) {
+ parent.contents = node;
+ }
+ else {
+ const pt = Node.isAlias(parent) ? 'alias' : 'scalar';
+ throw new Error(`Cannot replace node with ${pt} parent`);
+ }
+}
+
+exports.visit = visit;
+exports.visitAsync = visitAsync;
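
visitAsync accepts the same visitor shapes and control symbols, but each callback may return a Promise. A minimal sketch, assuming 'yaml' re-exports visitAsync and parseDocument; `lookup` is a hypothetical async resolver supplied by the caller, not part of the yaml package.

    const YAML = require('yaml');

    async function resolveRefs(src, lookup) {
      const doc = YAML.parseDocument(src);
      await YAML.visitAsync(doc, {
        async Scalar(key, node) {
          // Replace 'ref:<id>' strings with whatever the resolver returns
          if (typeof node.value === 'string' && node.value.startsWith('ref:')) {
            node.value = await lookup(node.value.slice(4));
          }
        }
      });
      return String(doc);
    }

    // resolveRefs('user: ref:42\n', async id => `user-${id}`)
    //   .then(out => console.log(out)); // user: user-42
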