Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Node.js streams work #570

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix readSubstream to not buffer everything first
Also export new `sliceStream` and `readAsync` functions.
  • Loading branch information
AArnott committed Dec 14, 2022
commit 5a0a549623881822bb201397f45cbb86e596bf4a
114 changes: 104 additions & 10 deletions src/nerdbank-streams/src/Utilities.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,20 +35,114 @@ export function writeSubstream(stream: NodeJS.WritableStream): NodeJS.WritableSt
});
}

export function readSubstream(stream: NodeJS.ReadableStream): NodeJS.ReadableStream {
/**
* Reads the next chunk from a stream, asynchronously waiting for more to be read if necessary.
* @param stream The stream to read from.
* @param cancellationToken A token whose cancellation will result in immediate rejection of the previously returned promise.
* @returns The result of reading from the stream. This will be null if the end of the stream is reached before any more can be read.
*/
export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
let result = stream.read()
if (result) {
return Promise.resolve(result)
}

return new Promise<string | Buffer | null>((resolve, reject) => {
const ctReg = cancellationToken?.onCancelled(reason => {
cleanup();
reject(reason);
});
stream.once('data', onData);
stream.once('error', onError);
stream.once('end', onEnd);

function onData(chunk) {
cleanup();
resolve(chunk);
}

function onError(...args) {
cleanup();
reject(...args);
}

function onEnd() {
cleanup();
resolve(null);
}

function cleanup() {
stream.off('data', onData);
stream.off('error', onError);
stream.off('end', onEnd);
if (ctReg) {
ctReg();
}
}
})
}

/**
* Returns a readable stream that will read just a slice of some existing stream.
* @param stream The stream to read from.
* @param length The maximum number of bytes to read from the stream.
* @returns A stream that will read up to the given number of elements, leaving the rest in the underlying stream.
*/
export function sliceStream(stream: NodeJS.ReadableStream, length: number): Readable {
return new Readable({
async read(_: number) {
const lenBuffer = await getBufferFrom(stream, 4);
const dv = new DataView(lenBuffer.buffer, lenBuffer.byteOffset, lenBuffer.length);
const chunkSize = dv.getUint32(0, false);
if (chunkSize === 0) {
this.push(null);
return;
while (length > 0) {
const chunk = await readAsync(stream);
if (!chunk) {
// We've reached the end of the source stream.
this.push(null);
return;
}

const countToConsume = Math.min(length, chunk.length)
length -= countToConsume
stream.unshift(chunk.slice(countToConsume))
if (!this.push(chunk.slice(0, countToConsume))) {
return;
}
}

// TODO: make this *stream* instead of read as an atomic chunk.
const payload = await getBufferFrom(stream, chunkSize);
this.push(payload);
this.push(null);
},
});
}

/**
 * Returns a readable stream over the next substream found on the given stream,
 * where each chunk of the substream is prefixed with a 4-byte big-endian length
 * header and the substream is terminated by a zero-length header.
 * @param stream The underlying stream to read the substream from.
 * @returns A stream over just the substream's payload bytes.
 */
export function readSubstream(stream: NodeJS.ReadableStream): NodeJS.ReadableStream {
    let activeSlice: Readable | null = null;
    return new Readable({
        async read(_: number) {
            while (true) {
                if (activeSlice === null) {
                    // No slice in progress: read the next 4-byte big-endian length header.
                    const header = await getBufferFrom(stream, 4);
                    const view = new DataView(header.buffer, header.byteOffset, header.length);
                    const sliceLength = view.getUint32(0, false);
                    if (sliceLength === 0) {
                        // A zero-length header terminates the substream.
                        this.push(null);
                        return;
                    }

                    activeSlice = sliceStream(stream, sliceLength);
                }

                const chunk = await readAsync(activeSlice);
                if (!chunk) {
                    // This slice is exhausted; loop around to read the next header.
                    activeSlice = null;
                    continue;
                }

                if (!this.push(chunk)) {
                    // Downstream backpressure: resume when read() is called again.
                    return;
                }
            }
        },
    });
}
Expand Down
2 changes: 1 addition & 1 deletion src/nerdbank-streams/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,5 @@ export { FullDuplexStream } from "./FullDuplexStream";
export { IDisposableObservable } from "./IDisposableObservable";
export { MultiplexingStream } from "./MultiplexingStream";
export { MultiplexingStreamOptions } from "./MultiplexingStreamOptions";
export { writeSubstream, readSubstream } from "./Utilities";
export { writeSubstream, readSubstream, readAsync, sliceStream } from "./Utilities";
export { QualifiedChannelId, ChannelSource } from "./QualifiedChannelId";
62 changes: 62 additions & 0 deletions src/nerdbank-streams/src/tests/Utilities.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
import CancellationToken from "cancellationtoken";
import { PassThrough } from "stream"
import { readAsync, sliceStream } from "../Utilities";

// Shared fixture: a fresh PassThrough stream is created before every spec so
// that data buffered by one test cannot leak into the next.
let thru: PassThrough
beforeEach(() => {
    thru = new PassThrough();
})

describe('readAsync', function () {
    it('returns immediately with results', async function () {
        thru.write(Buffer.from([1, 2, 3]));
        thru.write(Buffer.from([4, 5, 6]));

        // Both writes are already buffered, so a single read returns
        // everything that has accumulated so far.
        expect(await readAsync(thru)).toEqual(Buffer.from([1, 2, 3, 4, 5, 6]));
    })

    it('to wait for data', async function () {
        // Start the read before any data exists so that it must wait.
        const pending = readAsync(thru);

        thru.write(Buffer.from([1, 2, 3]));
        thru.write(Buffer.from([4, 5, 6]));

        // Only the first chunk emitted satisfies the pending read.
        expect(await pending).toEqual(Buffer.from([1, 2, 3]));
    })

    it('to return null at EOF', async function () {
        thru.end();
        expect(await readAsync(thru)).toBeNull();
    })

    it('to propagate errors', async function () {
        const error = new Error('Mock error');
        thru.destroy(error);
        await expectAsync(readAsync(thru)).toBeRejectedWith(error);
    })

    it('bails on cancellation', async function () {
        const cts = CancellationToken.create();
        const pending = readAsync(thru, cts.token);
        cts.cancel();
        await expectAsync(pending).toBeRejected();
    })
})

describe('sliceStream', function () {
    it('returns null on empty', async function () {
        thru.end();
        const slice = sliceStream(thru, 5);
        // An already-ended source yields nothing from the slice.
        expect(slice.read()).toBeNull();
    })

    it('returns subset of upper stream', async function () {
        thru.push(Buffer.from([1, 2, 3, 4, 5, 6]));
        const slice = sliceStream(thru, 3);

        // The slice yields exactly its 3 bytes, then EOF...
        expect(await readAsync(slice)).toEqual(Buffer.from([1, 2, 3]));
        expect(await readAsync(slice)).toBeNull();

        // ...while the remainder stays readable on the underlying stream.
        expect(await readAsync(thru)).toEqual(Buffer.from([4, 5, 6]));
    })
})