/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
* Copyright (C) Oliver Lenehan (sunsetkookaburra), 2022 */
export * from "./types.d.ts";
export * from "./util.ts";
export { Buffer } from "./deps.ts";
import { Buffer } from "./deps.ts";
import { Basin, Codec, Enc, Sink, Source } from "./types.d.ts";
import { asBytes } from "./util.ts";
/** Represents the byte-order used to encode numbers. */
export type Endian = "be" | "le";
/** The byte-order used by the system to encode numbers,
* either `"be"` or `"le"`.
* Currently does not detect mixed-endian *(unsure if this
* is a concern for JavaScript applications)*. */
export const SYSTEM_ENDIAN: Endian = (() => {
const a = new Uint16Array([0x1234]);
return (asBytes(a)[0] == 0x12) ? "be" : "le";
})();
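/* Illustrative sketch (not part of the original API): `SYSTEM_ENDIAN` can
* drive APIs that take an explicit little-endian flag, for example writing
* a 32-bit value in native byte order with `DataView`:
*
* ```ts
* const view = new DataView(new ArrayBuffer(4));
* view.setUint32(0, 0xdeadbeef, SYSTEM_ENDIAN === "le");
* ```
*/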
/** Error thrown when decoding from a stream fails,
* e.g. on an unexpected end of stream. */
export class DecodeError extends Error {
constructor(reason: string) {
super(reason);
this.name = this.constructor.name;
}
}
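/** Wraps a BYOB ("bring your own buffer") reader over a readable byte
* stream, so reads land directly in a caller-supplied `ArrayBuffer`.
*
* A minimal usage sketch (illustrative only; it assumes, as elsewhere in
* this module, that a `Source<Uint8Array>` is any object exposing a
* BYOB-capable `readable` byte stream — here an inline `"bytes"`-type
* `ReadableStream` stands in for a real source):
*
* ```ts
* const data = new Uint8Array([1, 2, 3, 4, 5]);
* const byteSource = {
*   readable: new ReadableStream({
*     type: "bytes",
*     start(controller) {
*       controller.enqueue(data);
*       controller.close();
*     },
*   }),
* };
* const zc = new ZeroCopier(byteSource);
* const view = await zc.readInto(new ArrayBuffer(4));
* zc.close();
* console.log(view); // Uint8Array(4) [ 1, 2, 3, 4 ]
* ```
*/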
export class ZeroCopier {
#reader: ReadableStreamBYOBReader;
/** Creates a new wrapper around an open reader instance. */
constructor(source: Source<Uint8Array>) {
this.#reader = source.readable.getReader({ mode: "byob" });
}
/** Read bytes from a BYOB stream until the specified window is
* filled or the end of the stream is reached.
* Returns a `Uint8Array` view onto the section of the (now-detached)
* `buffer` that the read filled.
*
* **Note: returned `Uint8Array` may be shorter than `limit`.**
*
* ```ts
* const zc = new ZeroCopier(byteStream);
* let bytes = new Uint8Array(12);
* bytes = await zc.readInto(bytes.buffer);
* zc.close();
* if (bytes.byteLength != 12) {
* console.log("End of stream reached");
* }
* ```
*
* See how it was implemented:
* + [web.dev Streams API Guide](https://web.dev/streams/#readable-byte-stream-code-sample)
* + [Deno GitHub Issue on Detached Buffers](https://github.com/denoland/deno/issues/14382)
* + [MDN Docs on Using Readable Byte Streams](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_byte_streams#consuming_the_byte_stream)
*/
async readInto(buffer: ArrayBuffer, offset = 0, limit = Math.max(0, buffer.byteLength - offset)) {
let nread = 0;
while (nread < limit) {
const { value, done } = await this.#reader.read(
// Create a new ArrayBufferView (of which Uint8Array is one).
// This gives a window / slot to be read into so we can read exactly what we want.
new Uint8Array(buffer, offset + nread, limit - nread),
);
if (value !== undefined) {
// We've successfully got some data from our source; however, because
// we passed in our buffer to a BYOB reader (via Uint8Array
// constructor above) the ArrayBuffer has become detached.
// Typically, accessing a detached buffer is obvious because it has
// zero elements, but a bug in Deno makes it appear as a zero-filled
// buffer (making one think nothing, or only zeros, was read).
// See https://github.com/denoland/deno/issues/14382#issue-1213634663
// We can claw back a detached array buffer via 'value',
// which is a Uint8Array view of the bytes successfully read.
buffer = value.buffer;
// The byte length of the view is how much was read.
nread += value.byteLength;
} else {
// stream cancelled
break;
}
// end of stream
if (done) break;
}
return new Uint8Array(buffer, offset, nread);
}
/** Release the reading lock on the stream provided at construction. */
close() {
// If we didn't free the reader, no one would be able to read from the source again.
this.#reader.releaseLock();
}
}
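/** An `ArrayBufferView`-like container owning a fixed-size `ArrayBuffer`
* for zero-copy BYOB reads. Because BYOB reads detach the buffer, the
* container re-captures the (replacement) buffer after every fill, and its
* `byteOffset` / `byteLength` describe the window filled by the last read. */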
export class ZeroCopyBuf implements Readonly<ArrayBufferView> {
#buf: ArrayBuffer;
#offset: number;
#length: number;
/** Create a new container for zero-copy byob stream operations.
*
* ```ts
* const zcbuf = new ZeroCopyBuf(42);
* const window = await zcbuf.fillExactFrom(source);
* console.log(window);
* ```
*/
constructor(size: number) {
this.#buf = new ArrayBuffer(size);
this.#offset = 0;
this.#length = size;
}
get buffer(): ArrayBufferLike {
return this.#buf;
}
get byteLength(): number {
return this.#length;
}
get byteOffset(): number {
return this.#offset;
}
/** Read bytes from a BYOB stream until the specified window of the
* `ZeroCopyBuf` is filled or the end of the stream is reached.
* Returns a `Uint8Array` view onto the section of the `ZeroCopyBuf`
* filled by the read.
*
* **Note: returned `Uint8Array` may be shorter than `limit`.**
*
* ```ts
* const v = await buf.fillFrom(r);
* if (v.byteLength != buf.byteLength) {
* console.log("End of stream reached");
* }
* ```
*
* See how it was implemented:
* + [web.dev Streams API Guide](https://web.dev/streams/#readable-byte-stream-code-sample)
* + [Deno GitHub Issue on Detached Buffers](https://github.com/denoland/deno/issues/14382)
* + [MDN Docs on Using Readable Byte Streams](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_byte_streams#consuming_the_byte_stream)
*/
async fillFrom(
source: Source<Uint8Array>,
offset = 0,
limit = this.#buf.byteLength,
): Promise<Uint8Array> {
const zc = new ZeroCopier(source);
const v = await zc.readInto(this.#buf, offset, limit);
this.#buf = v.buffer;
this.#offset = v.byteOffset;
this.#length = v.byteLength;
zc.close();
return v;
}
/** Perform a zero-copy BYOB read from `source`, filling exactly the
* specified window. Throws a `DecodeError` if not enough bytes could be
* read. Returns a `Uint8Array` window into the internal buffer.
*
* ```ts
* const zcbuf = new ZeroCopyBuf(42);
* const window = await zcbuf.fillExactFrom(source);
* console.log(window);
* ```
*/
async fillExactFrom(
source: Source<Uint8Array>,
offset = 0,
count = this.#buf.byteLength,
): Promise<Uint8Array> {
const window = await this.fillFrom(source, offset, count);
if (window.byteLength != count) throw new DecodeError("Unexpected EOF");
return window;
}
}
/** Read exactly `n` bytes into a new `Uint8Array` buffer and return it.
* `source` must be a readable byte stream that supports BYOB reads.
*
* ```ts
* const out: Uint8Array = await readBytes(readableByteStreamSource, 12);
* out.byteLength == 12; // true
* ```
*/
export async function readBytes(
source: Source<Uint8Array>,
n: number,
): Promise<Uint8Array> {
return await new ZeroCopyBuf(n).fillExactFrom(source);
}
/** Write a chunk into a Stream `Sink<T>`.
*
* ```ts
* const buf = new Buffer();
* const txt = new TextEncoder().encode("Hello, World!");
* await write(buf, txt);
* ```
*/
export async function write<T>(
sink: Sink<T>,
data: T,
): Promise<void> {
const w = sink.writable.getWriter();
await w.ready;
await w.write(data);
w.releaseLock();
}
/** Try to read `n` chunks from a Stream `Source<T>`,
* or as many as are available before an early EOF.
*
* `n` may be `Infinity` to collect all available chunks,
* provided `source` is a finite stream.
*
* ```ts
* const out = await gather(source, 3);
* out.length <= 3; // true
* ```
*/
export async function gather<T>(
source: Source<T>,
n = Infinity,
): Promise<T[]> {
const out = [];
const r = source.readable.getReader();
for (let i = 0; i < n; ++i) {
const { done, value } = await r.read();
if (value !== undefined) out.push(value);
if (done) break;
}
r.releaseLock();
return out;
}
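/** Encode `value` with `enc` into an in-memory `Buffer` and return the
* resulting bytes.
*
* A minimal sketch (`u8` here is a hypothetical `Enc<number>`, standing in
* for any encoder implementation):
*
* ```ts
* const bytes = await encode(u8, 0x2a);
* console.log(bytes); // e.g. Uint8Array [ 42 ] for a one-byte encoder
* ```
*/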
export async function encode<T>(enc: Enc<T>, value: T): Promise<Uint8Array> {
const buf = new Buffer();
await enc.writeTo(buf, value);
return buf.bytes();
}
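/** Push a single `value` through a `TransformStream` and return the first
* transformed chunk it produces.
*
* For example, with the standard `TextEncoderStream`
* (a `TransformStream<string, Uint8Array>`):
*
* ```ts
* const bytes = await transform(new TextEncoderStream(), "hi");
* console.log(bytes); // Uint8Array [ 104, 105 ]
* ```
*/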
export async function transform<I, O>(
basin: TransformStream<I, O>,
value: I,
) {
const [_, [result]] = await Promise.all([
write(basin, value),
gather(basin, 1),
]);
return result;
}
/** A `Basin` (paired writable and readable stream) built from user-supplied
* underlying sink and source callbacks sharing one asynchronous `start` step.
*
* NB for future implementors: reads performed inside the writable side's
* callbacks (or writes inside the readable side's) will cause headaches
* with readers / writers that are already locked. */
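/** A minimal construction sketch (illustrative only): a `BasinStream` whose
* writable side pushes chunks onto an in-memory queue and whose readable
* side drains it, exercised with this module's `write` and `gather`:
*
* ```ts
* const queue: Uint8Array[] = [];
* const basin = new BasinStream<Uint8Array>({
*   writable: { write: (chunk) => { queue.push(chunk); } },
*   readable: {
*     pull: (controller) => {
*       if (queue.length > 0) controller.enqueue(queue.shift()!);
*     },
*   },
* });
* await write(basin, new TextEncoder().encode("hello"));
* const [echoed] = await gather(basin, 1);
* console.log(new TextDecoder().decode(echoed)); // "hello"
* ```
*/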
export class BasinStream<I, O = I> implements Basin<I, O> {
readonly writable: WritableStream<I>;
readonly readable: ReadableStream<O>;
constructor(
{
writable,
readable,
start,
}: {
writable: Omit<UnderlyingSink<I>, "start">,
readable: Omit<UnderlyingSource<O>, "start">,
start?: () => void | Promise<void>,
},
writableStrategy?: QueuingStrategy,
readableStrategy?: QueuingStrategy,
) {
const startedDefer = Promise.withResolvers();
(async () => await start?.())().then(() => startedDefer.resolve(void(0)));
this.writable = new WritableStream({
...writable,
start: async () => {
await startedDefer.promise;
},
}, writableStrategy);
this.readable = new ReadableStream({
...readable,
start: async () => {
await startedDefer.promise;
},
}, readableStrategy);
}
}
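/** A `BasinStream` that applies a `Codec` over an underlying byte `Basin`:
* chunks written to the writable side are encoded into `underlying`, and
* each pull on the readable side decodes the next value from `underlying`.
*
* With `buffered: true`, each chunk is first encoded into an in-memory
* `Buffer` and then handed to `underlying` as a single write; otherwise the
* codec writes to `underlying` directly. */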
export class CodecStream<I, O = I> extends BasinStream<I, O> {
constructor(underlying: Basin<Uint8Array>, codec: Codec<I, O>, { buffered }: { buffered: boolean }) {
super({
writable: {
write: (
buffered === true
? async (chunk) => await write(underlying, await encode(codec, chunk))
: async (chunk) => await codec.writeTo(underlying, chunk)
),
},
readable: {
pull: async (controller) => {
controller.enqueue(await codec.readFrom(underlying));
},
},
});
}
}