about summary refs log tree commit diff
path: root/node_modules/strtok3/lib/AbstractTokenizer.d.ts
diff options
context:
space:
mode:
author	Minteck <freeziv.ytb@gmail.com>	2021-03-07 18:29:17 +0100
committer	Minteck <freeziv.ytb@gmail.com>	2021-03-07 18:29:17 +0100
commit0f79e708bf07721b73ea41e5d341be08e8ea4dce (patch)
treef3c63cd6a9f4ef0b26f95eec6a031600232e80c8 /node_modules/strtok3/lib/AbstractTokenizer.d.ts
downloadelectrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.tar.gz
electrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.tar.bz2
electrode-0f79e708bf07721b73ea41e5d341be08e8ea4dce.zip
Initial commit
Diffstat (limited to 'node_modules/strtok3/lib/AbstractTokenizer.d.ts')
-rw-r--r--	node_modules/strtok3/lib/AbstractTokenizer.d.ts	62
1 files changed, 62 insertions, 0 deletions
diff --git a/node_modules/strtok3/lib/AbstractTokenizer.d.ts b/node_modules/strtok3/lib/AbstractTokenizer.d.ts
new file mode 100644
index 0000000..936a758
--- /dev/null
+++ b/node_modules/strtok3/lib/AbstractTokenizer.d.ts
@@ -0,0 +1,62 @@
+/// <reference types="node" />
+import { ITokenizer, IFileInfo, IReadChunkOptions } from './types';
+import { IGetToken, IToken } from '@tokenizer/token';
+/**
+ * Core tokenizer
+ */
+export declare abstract class AbstractTokenizer implements ITokenizer {
+    fileInfo: IFileInfo;
+    protected constructor(fileInfo?: IFileInfo);
+    /**
+     * Tokenizer-stream position
+     */
+    position: number;
+    private numBuffer;
+    /**
+     * Read buffer from tokenizer
+     * @param buffer - Target buffer to fill with data read from the tokenizer-stream
+     * @param options - Additional read options
+     * @returns Promise with number of bytes read
+     */
+    abstract readBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    /**
+     * Peek (read ahead) buffer from tokenizer
+     * @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
+     * @param options - Peek behaviour options
+     * @returns Promise with number of bytes read
+     */
+    abstract peekBuffer(buffer: Buffer | Uint8Array, options?: IReadChunkOptions): Promise<number>;
+    /**
+     * Read a token from the tokenizer-stream
+     * @param token - The token to read
+     * @param position - If provided, the desired position in the tokenizer-stream
+     * @returns Promise with token data
+     */
+    readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+    /**
+     * Peek a token from the tokenizer-stream.
+     * @param token - Token to peek from the tokenizer-stream.
+     * @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
+     * @returns Promise with token data
+     */
+    peekToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
+    /**
+     * Read a numeric token from the stream
+     * @param token - Numeric token
+     * @returns Promise with number
+     */
+    readNumber(token: IToken<number>): Promise<number>;
+    /**
+     * Peek a numeric token from the stream
+     * @param token - Numeric token
+     * @returns Promise with number
+     */
+    peekNumber(token: IToken<number>): Promise<number>;
+    /**
+     * Ignore number of bytes, advances the pointer in the underlying tokenizer-stream.
+     * @param length - Number of bytes to skip (ignore)
+     * @returns Actual number of bytes ignored
+     */
+    abstract ignore(length: number): Promise<number>;
+    close(): Promise<void>;
+}