author    Minteck <freeziv.ytb@gmail.com>  2021-03-07 18:29:17 +0100
committer Minteck <freeziv.ytb@gmail.com>  2021-03-07 18:29:17 +0100
commit    0f79e708bf07721b73ea41e5d341be08e8ea4dce (patch)
tree      f3c63cd6a9f4ef0b26f95eec6a031600232e80c8 /node_modules/strtok3/lib
Initial commit
Diffstat (limited to 'node_modules/strtok3/lib')
-rw-r--r--  node_modules/strtok3/lib/AbstractTokenizer.d.ts    |  62
-rw-r--r--  node_modules/strtok3/lib/AbstractTokenizer.js      |  69
-rw-r--r--  node_modules/strtok3/lib/BufferTokenizer.d.ts      |  37
-rw-r--r--  node_modules/strtok3/lib/BufferTokenizer.js        | 114
-rw-r--r--  node_modules/strtok3/lib/FileTokenizer.d.ts        |  28
-rw-r--r--  node_modules/strtok3/lib/FileTokenizer.js          | 112
-rw-r--r--  node_modules/strtok3/lib/FsPromise.d.ts            |  18
-rw-r--r--  node_modules/strtok3/lib/FsPromise.js              |  79
-rw-r--r--  node_modules/strtok3/lib/ReadStreamTokenizer.d.ts  |  28
-rw-r--r--  node_modules/strtok3/lib/ReadStreamTokenizer.js    | 128
-rw-r--r--  node_modules/strtok3/lib/core.d.ts                 |  23
-rw-r--r--  node_modules/strtok3/lib/core.js                   |  29
-rw-r--r--  node_modules/strtok3/lib/index.d.ts                |  15
-rw-r--r--  node_modules/strtok3/lib/index.js                  |  27
-rw-r--r--  node_modules/strtok3/lib/types.d.ts                | 103
-rw-r--r--  node_modules/strtok3/lib/types.js                  |   2
16 files changed, 874 insertions, 0 deletions
diff --git a/node_modules/strtok3/lib/AbstractTokenizer.d.ts b/node_modules/strtok3/lib/AbstractTokenizer.d.ts
new file mode 100644
index 0000000..936a758
--- /dev/null
+++ b/node_modules/strtok3/lib/AbstractTokenizer.d.ts
@@ -0,0 +1,62 @@
+/// <reference types="node" />
+import { ITokenizer, IFileInfo, IReadChunkOptions } from './types';
+import { IGetToken, IToken } from '@tokenizer/token';
+/**
+ * Core tokenizer
+ */
+export declare abstract class AbstractTokenizer implements ITokenizer {
+ fileInfo: IFileInfo;
+ protected constructor(fileInfo?: IFileInfo);
+ /**
+ * Tokenizer-stream position
+ */
+ position: number;
+ private numBuffer;
+ /**
+ * Read buffer from tokenizer
+ * @param buffer - Target buffer to fill with data read from the tokenizer-stream
+ * @param options - Additional read options
+ * @returns Promise with number of bytes read
+ */
+ abstract readBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
+ * @param options - Peek behaviour options
+ * @returns Promise with number of bytes read
+ */
+ abstract peekBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Read a token from the tokenizer-stream
+ * @param token - The token to read
+ * @param position - If provided, the desired position in the tokenizer-stream
+ * @returns Promise with token data
+ */
+ readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+ /**
+ * Peek a token from the tokenizer-stream.
+ * @param token - Token to peek from the tokenizer-stream.
+ * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
+ * @returns Promise with token data
+ */
+ peekToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+ /**
+ * Read a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ readNumber(token: IToken<number>): Promise<number>;
+ /**
+ * Peek a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ peekNumber(token: IToken<number>): Promise<number>;
+ /**
+ * Ignore a number of bytes; advances the pointer in the underlying tokenizer-stream.
+ * @param length - Number of bytes to skip (ignore)
+ * @return actual number of bytes ignored
+ */
+ abstract ignore(length: number): Promise<number>;
+ close(): Promise<void>;
+}
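
A minimal usage sketch of the token interface consumed by readToken/peekToken above — any object exposing `len` and `get(buffer, offset)` satisfies IGetToken; the `FourCC` token and the input buffer below are hypothetical:

    const strtok3 = require('strtok3');

    // Hypothetical IGetToken<string>: a 4-byte ASCII tag (not part of the library).
    const FourCC = {
        len: 4,
        get: (buf, off) => buf.toString('ascii', off, off + 4)
    };

    (async () => {
        const tokenizer = strtok3.fromBuffer(Buffer.from('RIFFdata'));
        const tag = await tokenizer.readToken(FourCC); // reads 4 bytes, advances position
        console.log(tag); // => 'RIFF'
    })();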
diff --git a/node_modules/strtok3/lib/AbstractTokenizer.js b/node_modules/strtok3/lib/AbstractTokenizer.js
new file mode 100644
index 0000000..d4d4dba
--- /dev/null
+++ b/node_modules/strtok3/lib/AbstractTokenizer.js
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AbstractTokenizer = void 0;
+const peek_readable_1 = require("peek-readable");
+/**
+ * Core tokenizer
+ */
+class AbstractTokenizer {
+ constructor(fileInfo) {
+ /**
+ * Tokenizer-stream position
+ */
+ this.position = 0;
+ this.numBuffer = Buffer.alloc(10);
+ this.fileInfo = fileInfo ? fileInfo : {};
+ }
+ /**
+ * Read a token from the tokenizer-stream
+ * @param token - The token to read
+ * @param position - If provided, the desired position in the tokenizer-stream
+ * @returns Promise with token data
+ */
+ async readToken(token, position) {
+ const buffer = Buffer.alloc(token.len);
+ const len = await this.readBuffer(buffer, { position });
+ if (len < token.len)
+ throw new peek_readable_1.EndOfStreamError();
+ return token.get(buffer, 0);
+ }
+ /**
+ * Peek a token from the tokenizer-stream.
+ * @param token - Token to peek from the tokenizer-stream.
+ * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
+ * @returns Promise with token data
+ */
+ async peekToken(token, position = this.position) {
+ const buffer = Buffer.alloc(token.len);
+ const len = await this.peekBuffer(buffer, { position });
+ if (len < token.len)
+ throw new peek_readable_1.EndOfStreamError();
+ return token.get(buffer, 0);
+ }
+ /**
+ * Read a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ async readNumber(token) {
+ const len = await this.readBuffer(this.numBuffer, { length: token.len });
+ if (len < token.len)
+ throw new peek_readable_1.EndOfStreamError();
+ return token.get(this.numBuffer, 0);
+ }
+ /**
+ * Peek a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ async peekNumber(token) {
+ const len = await this.peekBuffer(this.numBuffer, { length: token.len });
+ if (len < token.len)
+ throw new peek_readable_1.EndOfStreamError();
+ return token.get(this.numBuffer, 0);
+ }
+ async close() {
+ // empty
+ }
+}
+exports.AbstractTokenizer = AbstractTokenizer;
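
Note that readNumber and peekNumber route through the shared 10-byte numBuffer scratch space, so any numeric token up to 8 bytes fits. A sketch, assuming the companion token-types package (not part of this diff) for the numeric token:

    const strtok3 = require('strtok3');
    const Token = require('token-types'); // assumed companion package

    (async () => {
        const tokenizer = strtok3.fromBuffer(Buffer.from([0x00, 0x00, 0x01, 0x00]));
        // UINT32_BE.len is 4; the bytes are read into the shared numBuffer.
        const value = await tokenizer.readNumber(Token.UINT32_BE);
        console.log(value); // => 256
    })();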
diff --git a/node_modules/strtok3/lib/BufferTokenizer.d.ts b/node_modules/strtok3/lib/BufferTokenizer.d.ts
new file mode 100644
index 0000000..1dd6119
--- /dev/null
+++ b/node_modules/strtok3/lib/BufferTokenizer.d.ts
@@ -0,0 +1,37 @@
+/// <reference types="node" />
+import { IFileInfo, IReadChunkOptions, ITokenizer } from './types';
+import { IGetToken, IToken } from '@tokenizer/token';
+export declare class BufferTokenizer implements ITokenizer {
+ private buffer;
+ fileInfo: IFileInfo;
+ position: number;
+ /**
+ * Construct BufferTokenizer
+ * @param buffer - Buffer to tokenize
+ * @param fileInfo - Pass additional file information to the tokenizer
+ */
+ constructor(buffer: Buffer, fileInfo?: IFileInfo);
+ /**
+ * Read buffer from tokenizer
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns {Promise<number>}
+ */
+ readBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns {Promise<number>}
+ */
+ peekBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+ peekToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+ readNumber(token: IToken<number>): Promise<number>;
+ peekNumber(token: IToken<number>): Promise<number>;
+ /**
+ * @return actual number of bytes ignored
+ */
+ ignore(length: number): Promise<number>;
+ close(): Promise<void>;
+}
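
The difference between peekBuffer and readBuffer is whether tokenizer.position advances; a small sketch against an in-memory buffer:

    const strtok3 = require('strtok3');

    (async () => {
        const tokenizer = strtok3.fromBuffer(Buffer.from('abcdef'));
        const peeked = Buffer.alloc(3);
        await tokenizer.peekBuffer(peeked); // position stays at 0
        const read = Buffer.alloc(3);
        await tokenizer.readBuffer(read); // position advances to 3
        console.log(peeked.toString(), read.toString(), tokenizer.position); // abc abc 3
    })();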
diff --git a/node_modules/strtok3/lib/BufferTokenizer.js b/node_modules/strtok3/lib/BufferTokenizer.js
new file mode 100644
index 0000000..aae1395
--- /dev/null
+++ b/node_modules/strtok3/lib/BufferTokenizer.js
@@ -0,0 +1,114 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BufferTokenizer = void 0;
+const peek_readable_1 = require("peek-readable");
+class BufferTokenizer {
+ /**
+ * Construct BufferTokenizer
+ * @param buffer - Buffer to tokenize
+ * @param fileInfo - Pass additional file information to the tokenizer
+ */
+ constructor(buffer, fileInfo) {
+ this.buffer = buffer;
+ this.position = 0;
+ this.fileInfo = fileInfo ? fileInfo : {};
+ this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : buffer.length;
+ }
+ /**
+ * Read buffer from tokenizer
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns {Promise<number>}
+ */
+ async readBuffer(buffer, options) {
+ if (options && options.position) {
+ if (options.position < this.position) {
+ throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
+ }
+ this.position = options.position;
+ }
+ return this.peekBuffer(buffer, options).then(bytesRead => {
+ this.position += bytesRead;
+ return bytesRead;
+ });
+ }
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns {Promise<number>}
+ */
+ async peekBuffer(buffer, options) {
+ let offset = 0;
+ let length = buffer.length;
+ let position = this.position;
+ if (options) {
+ if (options.position) {
+ if (options.position < this.position) {
+ throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
+ }
+ position = options.position;
+ }
+ if (Number.isInteger(options.length)) {
+ length = options.length;
+ }
+ else {
+ length -= options.offset || 0;
+ }
+ if (options.offset) {
+ offset = options.offset;
+ }
+ }
+ if (length === 0) {
+ return Promise.resolve(0);
+ }
+ position = position || this.position;
+ if (!length) {
+ length = buffer.length;
+ }
+ const bytes2read = Math.min(this.buffer.length - position, length);
+ if ((!options || !options.mayBeLess) && bytes2read < length) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ else {
+ this.buffer.copy(buffer, offset, position, position + bytes2read);
+ return bytes2read;
+ }
+ }
+ async readToken(token, position) {
+ this.position = position || this.position;
+ try {
+ const tv = await this.peekToken(token, this.position);
+ this.position += token.len;
+ return tv;
+ }
+ catch (err) {
+ this.position = this.buffer.length;
+ throw err;
+ }
+ }
+ async peekToken(token, position = this.position) {
+ if (this.buffer.length - position < token.len) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ return token.get(this.buffer, position);
+ }
+ async readNumber(token) {
+ return this.readToken(token);
+ }
+ async peekNumber(token) {
+ return this.peekToken(token);
+ }
+ /**
+ * @return actual number of bytes ignored
+ */
+ async ignore(length) {
+ const bytesIgnored = Math.min(this.buffer.length - this.position, length);
+ this.position += bytesIgnored;
+ return bytesIgnored;
+ }
+ async close() {
+ // empty
+ }
+}
+exports.BufferTokenizer = BufferTokenizer;
diff --git a/node_modules/strtok3/lib/FileTokenizer.d.ts b/node_modules/strtok3/lib/FileTokenizer.d.ts
new file mode 100644
index 0000000..c15f74b
--- /dev/null
+++ b/node_modules/strtok3/lib/FileTokenizer.d.ts
@@ -0,0 +1,28 @@
+/// <reference types="node" />
+import { AbstractTokenizer } from './AbstractTokenizer';
+import { IFileInfo, IReadChunkOptions } from './types';
+export declare class FileTokenizer extends AbstractTokenizer {
+ private fd;
+ constructor(fd: number, fileInfo: IFileInfo);
+ /**
+ * Read buffer from file
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns Promise number of bytes read
+ */
+ readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Peek buffer from file
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns Promise number of bytes read
+ */
+ peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * @param length - Number of bytes to ignore
+ * @return resolves with the number of bytes ignored: equals `length` if that many bytes are available, otherwise the number of bytes available
+ */
+ ignore(length: number): Promise<number>;
+ close(): Promise<void>;
+}
+export declare function fromFile(sourceFilePath: string): Promise<FileTokenizer>;
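
fromFile stats the file, opens a descriptor, and hands it to FileTokenizer; since close() releases that descriptor, a try/finally is the safe pattern. A sketch with a placeholder path:

    const strtok3 = require('strtok3');

    (async () => {
        const tokenizer = await strtok3.fromFile('sample.bin'); // placeholder path
        try {
            console.log(tokenizer.fileInfo.size); // filled in from fs.stat by fromFile
            const header = Buffer.alloc(4);
            await tokenizer.readBuffer(header, { mayBeLess: true });
        } finally {
            await tokenizer.close(); // releases the file descriptor
        }
    })();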
diff --git a/node_modules/strtok3/lib/FileTokenizer.js b/node_modules/strtok3/lib/FileTokenizer.js
new file mode 100644
index 0000000..b3adc24
--- /dev/null
+++ b/node_modules/strtok3/lib/FileTokenizer.js
@@ -0,0 +1,112 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromFile = exports.FileTokenizer = void 0;
+const AbstractTokenizer_1 = require("./AbstractTokenizer");
+const peek_readable_1 = require("peek-readable");
+const fs = require("./FsPromise");
+class FileTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+ constructor(fd, fileInfo) {
+ super(fileInfo);
+ this.fd = fd;
+ }
+ /**
+ * Read buffer from file
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns Promise number of bytes read
+ */
+ async readBuffer(buffer, options) {
+ let offset = 0;
+ let length = buffer.length;
+ if (options) {
+ if (options.position) {
+ if (options.position < this.position) {
+ throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
+ }
+ this.position = options.position;
+ }
+ if (Number.isInteger(options.length)) {
+ length = options.length;
+ }
+ else {
+ length -= options.offset || 0;
+ }
+ if (options.offset) {
+ offset = options.offset;
+ }
+ }
+ if (length === 0) {
+ return Promise.resolve(0);
+ }
+ const res = await fs.read(this.fd, buffer, offset, length, this.position);
+ this.position += res.bytesRead;
+ if (res.bytesRead < length && (!options || !options.mayBeLess)) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ return res.bytesRead;
+ }
+ /**
+ * Peek buffer from file
+ * @param buffer
+ * @param options - Read behaviour options
+ * @returns Promise number of bytes read
+ */
+ async peekBuffer(buffer, options) {
+ let offset = 0;
+ let length = buffer.length;
+ let position = this.position;
+ if (options) {
+ if (options.position) {
+ if (options.position < this.position) {
+ throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
+ }
+ position = options.position;
+ }
+ if (Number.isInteger(options.length)) {
+ length = options.length;
+ }
+ else {
+ length -= options.offset || 0;
+ }
+ if (options.offset) {
+ offset = options.offset;
+ }
+ }
+ if (length === 0) {
+ return Promise.resolve(0);
+ }
+ const res = await fs.read(this.fd, buffer, offset, length, position);
+ if ((!options || !options.mayBeLess) && res.bytesRead < length) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ return res.bytesRead;
+ }
+ /**
+ * @param length - Number of bytes to ignore
+ * @return resolves with the number of bytes ignored: equals `length` if that many bytes are available, otherwise the number of bytes available
+ */
+ async ignore(length) {
+ const bytesLeft = this.fileInfo.size - this.position;
+ if (length <= bytesLeft) {
+ this.position += length;
+ return length;
+ }
+ else {
+ this.position += bytesLeft;
+ return bytesLeft;
+ }
+ }
+ async close() {
+ return fs.close(this.fd);
+ }
+}
+exports.FileTokenizer = FileTokenizer;
+async function fromFile(sourceFilePath) {
+ const stat = await fs.stat(sourceFilePath);
+ if (!stat.isFile()) {
+ throw new Error(`Not a file: ${sourceFilePath}`);
+ }
+ const fd = await fs.open(sourceFilePath, 'r');
+ return new FileTokenizer(fd, { path: sourceFilePath, size: stat.size });
+}
+exports.fromFile = fromFile;
diff --git a/node_modules/strtok3/lib/FsPromise.d.ts b/node_modules/strtok3/lib/FsPromise.d.ts
new file mode 100644
index 0000000..1c9917a
--- /dev/null
+++ b/node_modules/strtok3/lib/FsPromise.d.ts
@@ -0,0 +1,18 @@
+/**
+ * Module converting fs functions to promise-based functions
+ */
+/// <reference types="node" />
+import * as fs from 'fs';
+export interface IReadResult {
+ bytesRead: number;
+ buffer: Buffer;
+}
+export declare const pathExists: typeof fs.existsSync;
+export declare const createReadStream: typeof fs.createReadStream;
+export declare function stat(path: fs.PathLike): Promise<fs.Stats>;
+export declare function close(fd: number): Promise<void>;
+export declare function open(path: fs.PathLike, mode?: string): Promise<number>;
+export declare function read(fd: number, buffer: Buffer, offset: number, length: number, position: number): Promise<IReadResult>;
+export declare function writeFile(path: fs.PathLike, data: Buffer | string): Promise<void>;
+export declare function writeFileSync(path: fs.PathLike, data: Buffer | string): void;
+export declare function readFile(path: fs.PathLike): Promise<Buffer>;
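
These are plain promise wrappers over the callback-style fs API; a sketch of the open/read/close cycle FileTokenizer builds on (requiring the internal lib/FsPromise path and the file path are for illustration only):

    const fsp = require('strtok3/lib/FsPromise'); // internal helper module

    (async () => {
        const fd = await fsp.open('sample.bin', 'r'); // placeholder path
        const { bytesRead, buffer } = await fsp.read(fd, Buffer.alloc(16), 0, 16, 0);
        console.log(bytesRead, buffer);
        await fsp.close(fd);
    })();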
diff --git a/node_modules/strtok3/lib/FsPromise.js b/node_modules/strtok3/lib/FsPromise.js
new file mode 100644
index 0000000..690a5e2
--- /dev/null
+++ b/node_modules/strtok3/lib/FsPromise.js
@@ -0,0 +1,79 @@
+"use strict";
+/**
+ * Module converting fs functions to promise-based functions
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readFile = exports.writeFileSync = exports.writeFile = exports.read = exports.open = exports.close = exports.stat = exports.createReadStream = exports.pathExists = void 0;
+const fs = require("fs");
+exports.pathExists = fs.existsSync;
+exports.createReadStream = fs.createReadStream;
+async function stat(path) {
+ return new Promise((resolve, reject) => {
+ fs.stat(path, (err, stats) => {
+ if (err)
+ reject(err);
+ else
+ resolve(stats);
+ });
+ });
+}
+exports.stat = stat;
+async function close(fd) {
+ return new Promise((resolve, reject) => {
+ fs.close(fd, err => {
+ if (err)
+ reject(err);
+ else
+ resolve();
+ });
+ });
+}
+exports.close = close;
+async function open(path, mode) {
+ return new Promise((resolve, reject) => {
+ fs.open(path, mode, (err, fd) => {
+ if (err)
+ reject(err);
+ else
+ resolve(fd);
+ });
+ });
+}
+exports.open = open;
+async function read(fd, buffer, offset, length, position) {
+ return new Promise((resolve, reject) => {
+ fs.read(fd, buffer, offset, length, position, (err, bytesRead, _buffer) => {
+ if (err)
+ reject(err);
+ else
+ resolve({ bytesRead, buffer: _buffer });
+ });
+ });
+}
+exports.read = read;
+async function writeFile(path, data) {
+ return new Promise((resolve, reject) => {
+ fs.writeFile(path, data, err => {
+ if (err)
+ reject(err);
+ else
+ resolve();
+ });
+ });
+}
+exports.writeFile = writeFile;
+function writeFileSync(path, data) {
+ fs.writeFileSync(path, data);
+}
+exports.writeFileSync = writeFileSync;
+async function readFile(path) {
+ return new Promise((resolve, reject) => {
+ fs.readFile(path, (err, buffer) => {
+ if (err)
+ reject(err);
+ else
+ resolve(buffer);
+ });
+ });
+}
+exports.readFile = readFile;
diff --git a/node_modules/strtok3/lib/ReadStreamTokenizer.d.ts b/node_modules/strtok3/lib/ReadStreamTokenizer.d.ts
new file mode 100644
index 0000000..fe0280f
--- /dev/null
+++ b/node_modules/strtok3/lib/ReadStreamTokenizer.d.ts
@@ -0,0 +1,28 @@
+/// <reference types="node" />
+import { AbstractTokenizer } from './AbstractTokenizer';
+import * as Stream from 'stream';
+import { IFileInfo, IReadChunkOptions } from './types';
+export declare class ReadStreamTokenizer extends AbstractTokenizer {
+ private streamReader;
+ constructor(stream: Stream.Readable, fileInfo?: IFileInfo);
+ /**
+ * Get file information; an HTTP client may implement this by doing a HEAD request
+ * @return Promise with file information
+ */
+ getFileInfo(): Promise<IFileInfo>;
+ /**
+ * Read buffer from tokenizer
+ * @param buffer - Target buffer to fill with data read from the tokenizer-stream
+ * @param options - Read behaviour options
+ * @returns Promise with number of bytes read
+ */
+ readBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer - Target buffer to write the data read to
+ * @param options - Read behaviour options
+ * @returns Promise with number of bytes peeked
+ */
+ peekBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+ ignore(length: number): Promise<number>;
+}
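
A minimal sketch of tokenizing a Node.js Readable via fromStream:

    const strtok3 = require('strtok3');
    const { Readable } = require('stream');

    (async () => {
        const stream = Readable.from([Buffer.from('streamed data')]);
        const tokenizer = await strtok3.fromStream(stream);
        const chunk = Buffer.alloc(8);
        await tokenizer.readBuffer(chunk);
        console.log(chunk.toString()); // => 'streamed'
    })();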
diff --git a/node_modules/strtok3/lib/ReadStreamTokenizer.js b/node_modules/strtok3/lib/ReadStreamTokenizer.js
new file mode 100644
index 0000000..142d154
--- /dev/null
+++ b/node_modules/strtok3/lib/ReadStreamTokenizer.js
@@ -0,0 +1,128 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ReadStreamTokenizer = void 0;
+const AbstractTokenizer_1 = require("./AbstractTokenizer");
+const peek_readable_1 = require("peek-readable");
+// import * as _debug from 'debug';
+// const debug = _debug('strtok3:ReadStreamTokenizer');
+const maxBufferSize = 256000;
+class ReadStreamTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
+ constructor(stream, fileInfo) {
+ super(fileInfo);
+ this.streamReader = new peek_readable_1.StreamReader(stream);
+ }
+ /**
+ * Get file information; an HTTP client may implement this by doing a HEAD request
+ * @return Promise with file information
+ */
+ async getFileInfo() {
+ return this.fileInfo;
+ }
+ /**
+ * Read buffer from tokenizer
+ * @param buffer - Target buffer to fill with data read from the tokenizer-stream
+ * @param options - Read behaviour options
+ * @returns Promise with number of bytes read
+ */
+ async readBuffer(buffer, options) {
+ // const _offset = position ? position : this.position;
+ // debug(`readBuffer ${_offset}...${_offset + length - 1}`);
+ let offset = 0;
+ let length = buffer.length;
+ if (options) {
+ if (Number.isInteger(options.length)) {
+ length = options.length;
+ }
+ else {
+ length -= options.offset || 0;
+ }
+ if (options.position) {
+ const skipBytes = options.position - this.position;
+ if (skipBytes > 0) {
+ await this.ignore(skipBytes);
+ return this.readBuffer(buffer, options);
+ }
+ else if (skipBytes < 0) {
+ throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
+ }
+ }
+ if (options.offset) {
+ offset = options.offset;
+ }
+ }
+ if (length === 0) {
+ return 0;
+ }
+ const bytesRead = await this.streamReader.read(buffer, offset, length);
+ this.position += bytesRead;
+ if ((!options || !options.mayBeLess) && bytesRead < length) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ return bytesRead;
+ }
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer - Target buffer to write the data read to
+ * @param options - Read behaviour options
+ * @returns Promise with number of bytes peeked
+ */
+ async peekBuffer(buffer, options) {
+ // const _offset = position ? position : this.position;
+ // debug(`peek ${_offset}...${_offset + length - 1}`);
+ let offset = 0;
+ let bytesRead;
+ let length = buffer.length;
+ if (options) {
+ if (options.offset) {
+ offset = options.offset;
+ }
+ if (Number.isInteger(options.length)) {
+ length = options.length;
+ }
+ else {
+ length -= options.offset || 0;
+ }
+ if (options.position) {
+ const skipBytes = options.position - this.position;
+ if (skipBytes > 0) {
+ const skipBuffer = Buffer.alloc(length + skipBytes);
+ bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: options.mayBeLess });
+ skipBuffer.copy(buffer, offset, skipBytes);
+ return bytesRead - skipBytes;
+ }
+ else if (skipBytes < 0) {
+ throw new Error('Cannot peek from a negative offset in a stream');
+ }
+ }
+ }
+ try {
+ bytesRead = await this.streamReader.peek(buffer, offset, length);
+ }
+ catch (err) {
+ if (options && options.mayBeLess && err instanceof peek_readable_1.EndOfStreamError) {
+ return 0;
+ }
+ throw err;
+ }
+ if ((!options || !options.mayBeLess) && bytesRead < length) {
+ throw new peek_readable_1.EndOfStreamError();
+ }
+ return bytesRead;
+ }
+ async ignore(length) {
+ // debug(`ignore ${this.position}...${this.position + length - 1}`);
+ const bufSize = Math.min(maxBufferSize, length);
+ const buf = Buffer.alloc(bufSize);
+ let totBytesRead = 0;
+ while (totBytesRead < length) {
+ const remaining = length - totBytesRead;
+ const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
+ if (bytesRead < 0) {
+ return bytesRead;
+ }
+ totBytesRead += bytesRead;
+ }
+ return totBytesRead;
+ }
+}
+exports.ReadStreamTokenizer = ReadStreamTokenizer;
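
Because a stream cannot seek, ignore() consumes the skipped bytes by reading them through a scratch buffer of at most maxBufferSize (256000) bytes per iteration, so large skips stay memory-bounded. A sketch:

    const strtok3 = require('strtok3');
    const { Readable } = require('stream');

    (async () => {
        const big = Buffer.alloc(300000, 0x61); // > maxBufferSize, forcing two iterations
        const tokenizer = await strtok3.fromStream(Readable.from([big]));
        await tokenizer.ignore(299999);
        console.log(tokenizer.position); // => 299999
    })();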
diff --git a/node_modules/strtok3/lib/core.d.ts b/node_modules/strtok3/lib/core.d.ts
new file mode 100644
index 0000000..72db304
--- /dev/null
+++ b/node_modules/strtok3/lib/core.d.ts
@@ -0,0 +1,23 @@
+/// <reference types="node" />
+import { ReadStreamTokenizer } from './ReadStreamTokenizer';
+import * as Stream from 'stream';
+import { BufferTokenizer } from './BufferTokenizer';
+import { IFileInfo } from './types';
+export { EndOfStreamError } from 'peek-readable';
+export { ITokenizer, IFileInfo } from './types';
+export { IToken, IGetToken } from '@tokenizer/token';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize if the provided Stream has the .path property set.
+ * @param stream - Read from Node.js Stream.Readable
+ * @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
+ * @returns ReadStreamTokenizer
+ */
+export declare function fromStream(stream: Stream.Readable, fileInfo?: IFileInfo): ReadStreamTokenizer;
+/**
+ * Construct ReadStreamTokenizer from given Buffer.
+ * @param buffer - Buffer to tokenize
+ * @param fileInfo - Pass additional file information to the tokenizer
+ * @returns BufferTokenizer
+ */
+export declare function fromBuffer(buffer: Buffer, fileInfo?: IFileInfo): BufferTokenizer;
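
The core entry point carries no fs dependency, so fileInfo such as size and mimeType must come from the caller (say, HTTP headers); requiring the internal lib/core path directly is shown for illustration only:

    const core = require('strtok3/lib/core'); // internal path, for illustration
    const { Readable } = require('stream');

    const stream = Readable.from([Buffer.from('payload')]);
    // fromStream in core is synchronous; it does not stat any file.
    const tokenizer = core.fromStream(stream, { size: 7, mimeType: 'application/octet-stream' });
    console.log(tokenizer.fileInfo.mimeType); // => 'application/octet-stream'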
diff --git a/node_modules/strtok3/lib/core.js b/node_modules/strtok3/lib/core.js
new file mode 100644
index 0000000..e572b1c
--- /dev/null
+++ b/node_modules/strtok3/lib/core.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromBuffer = exports.fromStream = exports.EndOfStreamError = void 0;
+const ReadStreamTokenizer_1 = require("./ReadStreamTokenizer");
+const BufferTokenizer_1 = require("./BufferTokenizer");
+var peek_readable_1 = require("peek-readable");
+Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return peek_readable_1.EndOfStreamError; } });
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize if the provided Stream has the .path property set.
+ * @param stream - Read from Node.js Stream.Readable
+ * @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
+ * @returns ReadStreamTokenizer
+ */
+function fromStream(stream, fileInfo) {
+ fileInfo = fileInfo ? fileInfo : {};
+ return new ReadStreamTokenizer_1.ReadStreamTokenizer(stream, fileInfo);
+}
+exports.fromStream = fromStream;
+/**
+ * Construct ReadStreamTokenizer from given Buffer.
+ * @param buffer - Buffer to tokenize
+ * @param fileInfo - Pass additional file information to the tokenizer
+ * @returns BufferTokenizer
+ */
+function fromBuffer(buffer, fileInfo) {
+ return new BufferTokenizer_1.BufferTokenizer(buffer, fileInfo);
+}
+exports.fromBuffer = fromBuffer;
diff --git a/node_modules/strtok3/lib/index.d.ts b/node_modules/strtok3/lib/index.d.ts
new file mode 100644
index 0000000..95397c3
--- /dev/null
+++ b/node_modules/strtok3/lib/index.d.ts
@@ -0,0 +1,15 @@
+/// <reference types="node" />
+import * as Stream from 'stream';
+import { ReadStreamTokenizer } from './ReadStreamTokenizer';
+import * as core from './core';
+export { fromFile } from './FileTokenizer';
+export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core';
+export { IToken, IGetToken } from '@tokenizer/token';
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize if the provided Stream has the .path property set.
+ * @param stream - Node.js Stream.Readable
+ * @param fileInfo - Pass additional file information to the tokenizer
+ * @returns Tokenizer
+ */
+export declare function fromStream(stream: Stream.Readable, fileInfo?: core.IFileInfo): Promise<ReadStreamTokenizer>;
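
Unlike the core variant, this fromStream is async because it may stat the stream's source: a stream created from a path carries .path, which is used to fill in fileInfo. A sketch with a placeholder path:

    const strtok3 = require('strtok3');
    const fs = require('fs');

    (async () => {
        const stream = fs.createReadStream('sample.bin'); // placeholder path
        const tokenizer = await strtok3.fromStream(stream); // stats 'sample.bin' via stream.path
        console.log(tokenizer.fileInfo.path, tokenizer.fileInfo.size);
    })();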
diff --git a/node_modules/strtok3/lib/index.js b/node_modules/strtok3/lib/index.js
new file mode 100644
index 0000000..030e750
--- /dev/null
+++ b/node_modules/strtok3/lib/index.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fromStream = exports.fromBuffer = exports.EndOfStreamError = exports.fromFile = void 0;
+const fs = require("./FsPromise");
+const core = require("./core");
+var FileTokenizer_1 = require("./FileTokenizer");
+Object.defineProperty(exports, "fromFile", { enumerable: true, get: function () { return FileTokenizer_1.fromFile; } });
+var core_1 = require("./core");
+Object.defineProperty(exports, "EndOfStreamError", { enumerable: true, get: function () { return core_1.EndOfStreamError; } });
+Object.defineProperty(exports, "fromBuffer", { enumerable: true, get: function () { return core_1.fromBuffer; } });
+/**
+ * Construct ReadStreamTokenizer from given Stream.
+ * Will set fileSize if the provided Stream has the .path property set.
+ * @param stream - Node.js Stream.Readable
+ * @param fileInfo - Pass additional file information to the tokenizer
+ * @returns Tokenizer
+ */
+async function fromStream(stream, fileInfo) {
+ fileInfo = fileInfo ? fileInfo : {};
+ if (stream.path) {
+ const stat = await fs.stat(stream.path);
+ fileInfo.path = stream.path;
+ fileInfo.size = stat.size;
+ }
+ return core.fromStream(stream, fileInfo);
+}
+exports.fromStream = fromStream;
diff --git a/node_modules/strtok3/lib/types.d.ts b/node_modules/strtok3/lib/types.d.ts
new file mode 100644
index 0000000..93895df
--- /dev/null
+++ b/node_modules/strtok3/lib/types.d.ts
@@ -0,0 +1,103 @@
+/// <reference types="node" />
+import { IGetToken } from '@tokenizer/token';
+export interface IFileInfo {
+ /**
+ * File size in bytes
+ */
+ size?: number;
+ /**
+ * MIME-type of file
+ */
+ mimeType?: string;
+ /**
+ * File path
+ */
+ path?: string;
+ /**
+ * File URL
+ */
+ url?: string;
+}
+export interface IReadChunkOptions {
+ /**
+ * The offset in the buffer to start writing at; default is 0
+ */
+ offset?: number;
+ /**
+ * Number of bytes to read.
+ */
+ length?: number;
+ /**
+ * Position where to begin reading from the file.
+ * Defaults to `tokenizer.position`.
+ * Position may not be less than `tokenizer.position`.
+ */
+ position?: number;
+ /**
+ * If set, will not throw an EOF error if not all of the requested data could be read
+ */
+ mayBeLess?: boolean;
+}
+/**
+ * The tokenizer allows us to read or peek from the tokenizer-stream.
+ * The tokenizer-stream is an abstraction of a stream, file or Buffer.
+ */
+export interface ITokenizer {
+ /**
+ * Provide access to information about the underlying stream or file.
+ */
+ fileInfo: IFileInfo;
+ /**
+ * Offset in bytes (= number of bytes read) since beginning of file or stream
+ */
+ position: number;
+ /**
+ * Peek (read ahead) buffer from tokenizer
+ * @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
+ * @param options - Read behaviour options
+ * @returns Promise with number of bytes read
+ */
+ peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Read buffer from tokenizer
+ * @param buffer - Target buffer to fill with data read from the tokenizer-stream
+ * @param options - Additional read options
+ * @returns Promise with number of bytes read
+ */
+ readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
+ /**
+ * Peek a token from the tokenizer-stream.
+ * @param token - Token to peek from the tokenizer-stream.
+ * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
+ * @param maybeless - If set, will not throw an EOF error if less than the requested length could be read.
+ */
+ peekToken<T>(token: IGetToken<T>, position?: number | null, maybeless?: boolean): Promise<T>;
+ /**
+ * Read a token from the tokenizer-stream.
+ * @param token - Token to read from the tokenizer-stream.
+ * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
+ */
+ readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+ /**
+ * Peek a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ peekNumber(token: IGetToken<number>): Promise<number>;
+ /**
+ * Read a numeric token from the stream
+ * @param token - Numeric token
+ * @returns Promise with number
+ */
+ readNumber(token: IGetToken<number>): Promise<number>;
+ /**
+ * Ignore given number of bytes
+ * @param length - Number of bytes to ignore
+ */
+ ignore(length: number): Promise<number>;
+ /**
+ * Clean up resources.
+ * It does not close the stream for StreamReader, but it does close the file-descriptor.
+ */
+ close(): Promise<void>;
+}
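
How the IReadChunkOptions fields combine in practice — offset addresses the target buffer, position addresses the tokenizer-stream, and mayBeLess suppresses the EOF error on short reads; a sketch:

    const strtok3 = require('strtok3');

    (async () => {
        const tokenizer = strtok3.fromBuffer(Buffer.from('abcdef'));
        const target = Buffer.alloc(8);
        // Write at offset 2 of `target`, read 4 bytes starting at stream
        // position 1, and tolerate a short read instead of throwing:
        const n = await tokenizer.readBuffer(target, { offset: 2, length: 4, position: 1, mayBeLess: true });
        console.log(n, target.toString('ascii', 2, 2 + n)); // => 4 'bcde'
    })();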
diff --git a/node_modules/strtok3/lib/types.js b/node_modules/strtok3/lib/types.js
new file mode 100644
index 0000000..c8ad2e5
--- /dev/null
+++ b/node_modules/strtok3/lib/types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });