author | Minteck <freeziv.ytb@gmail.com> | 2021-03-07 18:29:17 +0100
committer | Minteck <freeziv.ytb@gmail.com> | 2021-03-07 18:29:17 +0100
commit | 0f79e708bf07721b73ea41e5d341be08e8ea4dce (patch)
tree | f3c63cd6a9f4ef0b26f95eec6a031600232e80c8 /node_modules/strtok3/lib/BufferTokenizer.js
download | electrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.tar.gz electrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.tar.bz2 electrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.zip
Initial commit
Diffstat (limited to 'node_modules/strtok3/lib/BufferTokenizer.js')
-rw-r--r-- | node_modules/strtok3/lib/BufferTokenizer.js | 114
1 file changed, 114 insertions, 0 deletions
diff --git a/node_modules/strtok3/lib/BufferTokenizer.js b/node_modules/strtok3/lib/BufferTokenizer.js
new file mode 100644
index 0000000..aae1395
--- /dev/null
+++ b/node_modules/strtok3/lib/BufferTokenizer.js
@@ -0,0 +1,114 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BufferTokenizer = void 0;
+const peek_readable_1 = require("peek-readable");
+class BufferTokenizer {
+    /**
+     * Construct BufferTokenizer
+     * @param buffer - Buffer to tokenize
+     * @param fileInfo - Pass additional file information to the tokenizer
+     */
+    constructor(buffer, fileInfo) {
+        this.buffer = buffer;
+        this.position = 0;
+        this.fileInfo = fileInfo ? fileInfo : {};
+        this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : buffer.length;
+    }
+    /**
+     * Read buffer from tokenizer
+     * @param buffer
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async readBuffer(buffer, options) {
+        if (options && options.position) {
+            if (options.position < this.position) {
+                throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
+            }
+            this.position = options.position;
+        }
+        return this.peekBuffer(buffer, options).then(bytesRead => {
+            this.position += bytesRead;
+            return bytesRead;
+        });
+    }
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param buffer
+     * @param options - Read behaviour options
+     * @returns {Promise<number>}
+     */
+    async peekBuffer(buffer, options) {
+        let offset = 0;
+        let length = buffer.length;
+        let position = this.position;
+        if (options) {
+            if (options.position) {
+                if (options.position < this.position) {
+                    throw new Error('`options.position` can be less than `tokenizer.position`');
+                }
+                position = options.position;
+            }
+            if (Number.isInteger(options.length)) {
+                length = options.length;
+            }
+            else {
+                length -= options.offset || 0;
+            }
+            if (options.offset) {
+                offset = options.offset;
+            }
+        }
+        if (length === 0) {
+            return Promise.resolve(0);
+        }
+        position = position || this.position;
+        if (!length) {
+            length = buffer.length;
+        }
+        const bytes2read = Math.min(this.buffer.length - position, length);
+        if ((!options || !options.mayBeLess) && bytes2read < length) {
+            throw new peek_readable_1.EndOfStreamError();
+        }
+        else {
+            this.buffer.copy(buffer, offset, position, position + bytes2read);
+            return bytes2read;
+        }
+    }
+    async readToken(token, position) {
+        this.position = position || this.position;
+        try {
+            const tv = this.peekToken(token, this.position);
+            this.position += token.len;
+            return tv;
+        }
+        catch (err) {
+            this.position += this.buffer.length - position;
+            throw err;
+        }
+    }
+    async peekToken(token, position = this.position) {
+        if (this.buffer.length - position < token.len) {
+            throw new peek_readable_1.EndOfStreamError();
+        }
+        return token.get(this.buffer, position);
+    }
+    async readNumber(token) {
+        return this.readToken(token);
+    }
+    async peekNumber(token) {
+        return this.peekToken(token);
+    }
+    /**
+     * @return actual number of bytes ignored
+     */
+    async ignore(length) {
+        const bytesIgnored = Math.min(this.buffer.length - this.position, length);
+        this.position += bytesIgnored;
+        return bytesIgnored;
+    }
+    async close() {
+        // empty
+    }
+}
+exports.BufferTokenizer = BufferTokenizer;
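For context (not part of the commit): a minimal usage sketch of the BufferTokenizer added above. The hand-rolled `UINT32_BE` object is only illustrative of the `{ len, get }` token shape that `readToken`/`peekToken` expect; in practice such tokens typically come from the `token-types` package, and the deep require path simply mirrors the file shown in this diff.

```js
const { BufferTokenizer } = require('strtok3/lib/BufferTokenizer');

// Illustrative token: any object with `len` and `get(buffer, offset)` works.
const UINT32_BE = {
  len: 4,
  get: (buf, off) => buf.readUInt32BE(off)
};

(async () => {
  const tokenizer = new BufferTokenizer(Buffer.from([0x00, 0x00, 0x01, 0x02, 0xff]));
  const value = await tokenizer.readToken(UINT32_BE); // reads 4 bytes -> 258
  console.log(value, tokenizer.position);             // 258 4
  const skipped = await tokenizer.ignore(10);         // clamps to bytes remaining
  console.log(skipped);                               // 1
})();
```

`readToken` advances `tokenizer.position` by `token.len`, while `peekToken` leaves it untouched; `ignore()` clamps to the bytes remaining in the buffer, which is why the final call returns 1 rather than 10.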