"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ReadStreamTokenizer = void 0;
const AbstractTokenizer_1 = require("./AbstractTokenizer");
const peek_readable_1 = require("peek-readable");
// import * as _debug from 'debug';
// const debug = _debug('strtok3:ReadStreamTokenizer');
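// Maximum size (in bytes) of the scratch buffer allocated by ignore()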
const maxBufferSize = 256000;
class ReadStreamTokenizer extends AbstractTokenizer_1.AbstractTokenizer {
constructor(stream, fileInfo) {
super(fileInfo);
this.streamReader = new peek_readable_1.StreamReader(stream);
}
    /**
     * Get file information; an HTTP client may implement this by issuing a HEAD request
     * @returns Promise with file information
     */
async getFileInfo() {
return this.fileInfo;
}
/**
* Read buffer from tokenizer
     * @param buffer - Target buffer to fill with data read from the tokenizer stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
async readBuffer(buffer, options) {
// const _offset = position ? position : this.position;
// debug(`readBuffer ${_offset}...${_offset + length - 1}`);
let offset = 0;
let length = buffer.length;
if (options) {
if (Number.isInteger(options.length)) {
length = options.length;
}
else {
length -= options.offset || 0;
}
            if (options.position !== undefined) {
const skipBytes = options.position - this.position;
if (skipBytes > 0) {
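                    // The requested position lies ahead of the current position:
                    // skip the intermediate bytes, then retry the read.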
await this.ignore(skipBytes);
return this.readBuffer(buffer, options);
}
else if (skipBytes < 0) {
                    throw new Error('`options.position` must be equal to or greater than `tokenizer.position`');
}
}
if (options.offset) {
offset = options.offset;
}
}
if (length === 0) {
return 0;
}
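        // Delegate the read to the underlying stream reader and advance the tokenizer position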
const bytesRead = await this.streamReader.read(buffer, offset, length);
this.position += bytesRead;
if ((!options || !options.mayBeLess) && bytesRead < length) {
throw new peek_readable_1.EndOfStreamError();
}
return bytesRead;
}
/**
* Peek (read ahead) buffer from tokenizer
* @param buffer - Target buffer to write the data read to
* @param options - Read behaviour options
* @returns Promise with number of bytes peeked
*/
async peekBuffer(buffer, options) {
// const _offset = position ? position : this.position;
// debug(`peek ${_offset}...${_offset + length - 1}`);
let offset = 0;
let bytesRead;
let length = buffer.length;
if (options) {
if (options.offset) {
offset = options.offset;
}
if (Number.isInteger(options.length)) {
length = options.length;
}
else {
length -= options.offset || 0;
}
            if (options.position !== undefined) {
const skipBytes = options.position - this.position;
if (skipBytes > 0) {
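                    // Peek past the skipped bytes into a temporary buffer, then copy
                    // only the bytes at the requested position into the caller's buffer.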
const skipBuffer = Buffer.alloc(length + skipBytes);
bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: options.mayBeLess });
skipBuffer.copy(buffer, offset, skipBytes);
return bytesRead - skipBytes;
}
else if (skipBytes < 0) {
throw new Error('Cannot peek from a negative offset in a stream');
}
}
}
try {
bytesRead = await this.streamReader.peek(buffer, offset, length);
}
catch (err) {
if (options && options.mayBeLess && err instanceof peek_readable_1.EndOfStreamError) {
return 0;
}
throw err;
}
if ((!options || !options.mayBeLess) && bytesRead < length) {
throw new peek_readable_1.EndOfStreamError();
}
return bytesRead;
}
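    /**
     * Advance the tokenizer position by reading and discarding bytes
     * @param length - Number of bytes to ignore
     * @returns Promise with number of bytes ignored
     */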
async ignore(length) {
// debug(`ignore ${this.position}...${this.position + length - 1}`);
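        // Skip in chunks so the scratch buffer never exceeds maxBufferSize bytes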
const bufSize = Math.min(maxBufferSize, length);
const buf = Buffer.alloc(bufSize);
let totBytesRead = 0;
while (totBytesRead < length) {
const remaining = length - totBytesRead;
const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
if (bytesRead < 0) {
return bytesRead;
}
totBytesRead += bytesRead;
}
return totBytesRead;
}
}
exports.ReadStreamTokenizer = ReadStreamTokenizer;
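// Usage sketch (illustrative, not part of the module): in the published strtok3
// package this class is normally obtained via the `fromStream()` factory rather
// than constructed directly; the file name below is a placeholder.
//
//   const strtok3 = require('strtok3');
//   const fs = require('fs');
//
//   (async () => {
//       const tokenizer = await strtok3.fromStream(fs.createReadStream('file.bin'));
//       const header = Buffer.alloc(4);
//       await tokenizer.peekBuffer(header, { mayBeLess: true }); // look ahead without consuming
//       await tokenizer.readBuffer(header, { mayBeLess: true }); // consume the same 4 bytes
//   })();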