Node.js streams work #570

Draft · wants to merge 3 commits into base: main
Changes from 1 commit
Test and fix more use cases
AArnott committed Dec 14, 2022
commit 8e2fabf71a1765144b86a0f7cfeef8f032c183de
2 changes: 1 addition & 1 deletion azure-pipelines/node.yml
@@ -30,4 +30,4 @@ steps:
   displayName: 🧪 yarn tslint
   inputs:
     projectDirectory: src/nerdbank-streams
-    arguments: tslint --project .
+    arguments: lint
6 changes: 4 additions & 2 deletions src/nerdbank-streams/package.json
@@ -28,7 +28,8 @@
   "scripts": {
     "build": "tsc -p gulpfile.tsconfig.json && gulp",
     "watch": "node ./node_modules/typescript/bin/tsc -p tsconfig.json -w",
-    "test": "jasmine"
+    "test": "jasmine",
+    "lint": "tslint --project ."
   },
   "devDependencies": {
     "@types/jasmine": "^4.0.3",
@@ -52,6 +53,7 @@
     "await-semaphore": "^0.1.3",
     "cancellationtoken": "^2.0.1",
     "caught": "^0.1.3",
-    "msgpack-lite": "^0.1.26"
+    "msgpack-lite": "^0.1.26",
+    "plexer": "^2.0.0"
   }
 }
17 changes: 2 additions & 15 deletions src/nerdbank-streams/src/FullDuplexStream.ts
@@ -1,4 +1,5 @@
 import { Duplex, PassThrough } from "stream";
+import duplexer = require('plexer')
 
 export class FullDuplexStream {
     public static CreatePair(): { first: Duplex, second: Duplex } {
@@ -11,20 +12,6 @@ export class FullDuplexStream {
     }
 
     public static Splice(readable: NodeJS.ReadableStream, writable: NodeJS.WritableStream): Duplex {
-        const duplex = new Duplex({
-            write(chunk, encoding, callback) {
-                writable.write(chunk, encoding, callback);
-            },
-
-            final(callback) {
-                writable.end(callback);
-            },
-        });
-
-        // All reads and events come directly from the readable stream.
-        duplex.read = readable.read.bind(readable);
-        duplex.on = readable.on.bind(readable) as any;
-
-        return duplex;
+        return duplexer(writable, readable)
     }
 }
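For reference, plexer's two-argument form wraps an existing writable and readable into a single Duplex, which is all Splice does now. A minimal sketch of the resulting behavior, assuming only the (writable, readable) argument order shown in the diff above:

    import { PassThrough } from "stream";
    import duplexer = require("plexer");

    const readable = new PassThrough();
    const writable = new PassThrough();
    const duplex = duplexer(writable, readable);

    // Writes to the duplex are forwarded to `writable`...
    writable.once("data", (chunk) => console.log("write side:", chunk.toString()));
    duplex.write("hello");

    // ...and data written to `readable` comes out of the duplex's read side.
    duplex.once("data", (chunk) => console.log("read side:", chunk.toString()));
    readable.write("world");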
126 changes: 41 additions & 85 deletions src/nerdbank-streams/src/Utilities.ts
@@ -1,6 +1,5 @@
 import CancellationToken from "cancellationtoken";
 import { Readable, Writable } from "stream";
-import { Deferred } from "./Deferred";
 import { IDisposableObservable } from "./IDisposableObservable";
 
 export async function writeAsync(stream: NodeJS.WritableStream, chunk: any) {
@@ -42,7 +41,11 @@ export function writeSubstream(stream: NodeJS.WritableStream): NodeJS.WritableStream
  * @returns The result of reading from the stream. This will be null if the end of the stream is reached before any more can be read.
  */
 export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
-    let result = stream.read()
+    if (!(stream.isPaused() || (stream as Readable).readableFlowing !== true)) {
+        throw new Error('Stream must not be in flowing mode.');
+    }
+
+    const result = stream.read()
     if (result) {
         return Promise.resolve(result)
     }
@@ -55,6 +58,7 @@ export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
         stream.once('data', onData);
         stream.once('error', onError);
         stream.once('end', onEnd);
+        stream.resume()
 
         function onData(chunk) {
             cleanup();
@@ -72,6 +76,7 @@ export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
         }
 
         function cleanup() {
+            stream.pause();
             stream.off('data', onData);
             stream.off('error', onError);
             stream.off('end', onEnd);
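The net effect of the three readAsync changes above: the stream must be handed over in paused mode, readAsync resumes it only while waiting for the next 'data' event, and pauses it again before resolving. A typical consumption loop then looks like this (a sketch; per its doc comment, readAsync resolves null at end of stream):

    import { PassThrough } from "stream";
    import { readAsync } from "./Utilities";

    async function drain(stream: NodeJS.ReadableStream): Promise<void> {
        // `stream` must not be flowing when readAsync is called;
        // readAsync itself toggles flowing mode around each wait.
        let chunk = await readAsync(stream);
        while (chunk !== null) {
            console.log(`read ${chunk.length} bytes`);
            chunk = await readAsync(stream);
        }
        console.log("end of stream");
    }

    const source = new PassThrough();
    drain(source);
    source.write(Buffer.from("hello"));
    source.end(Buffer.from("world"));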
@@ -91,8 +96,8 @@ export function readAsync(stream: NodeJS.ReadableStream, cancellationToken?: CancellationToken): Promise<string | Buffer | null> {
 export function sliceStream(stream: NodeJS.ReadableStream, length: number): Readable {
     return new Readable({
         async read(_: number) {
-            while (length > 0) {
-                const chunk = await readAsync(stream);
+            if (length > 0) {
+                const chunk = stream.read() ?? await readAsync(stream);
                 if (!chunk) {
                     // We've reached the end of the source stream.
                     this.push(null);
@@ -102,12 +107,13 @@ export function sliceStream(stream: NodeJS.ReadableStream, length: number): Readable {
                 const countToConsume = Math.min(length, chunk.length)
                 length -= countToConsume
                 stream.unshift(chunk.slice(countToConsume))
-                if (!this.push(chunk.slice(0, countToConsume))) {
-                    return;
+                if (this.push(chunk.slice(0, countToConsume)) && length === 0) {
+                    // Save another call later by informing immediately that we're at the end of the stream.
+                    this.push(null);
                 }
+            } else {
+                this.push(null);
             }
-
-            this.push(null);
         },
     });
 }
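sliceStream still yields at most `length` bytes and unshifts any excess back onto the source, so the bytes past the slice boundary stay available to the next reader. A sketch:

    import { PassThrough } from "stream";
    import { sliceStream } from "./Utilities";

    const source = new PassThrough();
    source.end(Buffer.from([1, 2, 3, 4, 5]));

    const slice = sliceStream(source, 3);
    slice.on("data", (chunk) => console.log("slice:", chunk)); // <Buffer 01 02 03>
    slice.on("end", () => {
        // The two bytes past the slice boundary were unshifted back.
        console.log("remainder:", source.read()); // <Buffer 04 05>
    });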
@@ -130,18 +136,15 @@ export function readSubstream(stream: NodeJS.ReadableStream): NodeJS.ReadableStream {
                 currentSlice = sliceStream(stream, length)
             }
 
-            while (currentSlice !== null) {
-                const chunk = await readAsync(currentSlice);
-                if (!chunk) {
-                    // We've reached the end of this chunk. We'll have to read the next header.
-                    currentSlice = null;
-                    break;
-                }
-
-                if (!this.push(chunk)) {
-                    return;
-                }
+            const chunk = await readAsync(currentSlice);
+            if (!chunk) {
+                // We've reached the end of this chunk. We'll have to read the next header.
+                currentSlice = null;
+                continue;
+            }
+
+            this.push(chunk);
+            return;
             }
         },
     });
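readSubstream now pushes at most one chunk per _read call and relies on Node to call _read again, instead of looping until backpressure. The substream round-trip itself is unchanged; a sketch based on the Substream tests further down, assuming writeSubstream length-prefixes each chunk and its end() writes the terminating header:

    import { PassThrough } from "stream";
    import { getBufferFrom, readSubstream, writeSubstream } from "./Utilities";

    async function roundTrip(): Promise<void> {
        const thru = new PassThrough();
        const sub = writeSubstream(thru);
        sub.write(Buffer.from([1, 2, 3]));
        sub.end();

        const substream = readSubstream(thru);
        // allowEndOfStream: a short read returns whatever the substream held.
        const payload = await getBufferFrom(substream, 10, true);
        console.log(payload); // <Buffer 01 02 03>
    }

    roundTrip();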
@@ -165,81 +168,34 @@ export async function getBufferFrom(
     allowEndOfStream: boolean = false,
     cancellationToken?: CancellationToken): Promise<Buffer | null> {
 
-    const streamEnded = new Deferred<void>();
-
     if (size === 0) {
-        return Buffer.from([]);
+        return Buffer.alloc(0)
     }
 
-    let readBuffer: Buffer | null = null;
-    let index: number = 0;
-    while (size > 0) {
-        cancellationToken?.throwIfCancelled();
-        let availableSize = (readable as Readable).readableLength;
-        if (!availableSize) {
-            // Check the end of stream
-            if ((readable as Readable).readableEnded || streamEnded.isCompleted) {
-                // stream is closed
-                if (!allowEndOfStream) {
-                    throw new Error("Stream terminated before required bytes were read.");
-                }
-
-                // Returns what has been read so far
-                if (readBuffer === null) {
-                    return null;
-                }
-
-                // we need trim extra spaces
-                return readBuffer.subarray(0, index)
-            }
-
-            // we retain this behavior when availableSize === false
-            // to make existing unit tests happy (which assumes we will try to read stream when no data is ready.)
-            availableSize = size;
-        } else if (availableSize > size) {
-            availableSize = size;
-        }
-
-        const newBuffer = readable.read(availableSize) as Buffer;
-        if (newBuffer) {
-            if (newBuffer.length < availableSize && !allowEndOfStream) {
-                throw new Error("Stream terminated before required bytes were read.");
-            }
-
-            if (readBuffer === null) {
-                if (availableSize === size || newBuffer.length < availableSize) {
-                    // in the fast pass, we read the entire data once, and donot allocate an extra array.
-                    return newBuffer;
-                }
+    const initialData = readable.read(size) as Buffer | null;
+    if (initialData) {
+        return initialData;
+    }
 
-                // if we read partial data, we need allocate a buffer to join all data together.
-                readBuffer = Buffer.alloc(size);
+    let totalBytesRead = 0
+    const result = Buffer.alloc(size);
+    const streamSlice = sliceStream(readable, size);
+    while (totalBytesRead < size) {
+        const chunk = await readAsync(streamSlice, cancellationToken) as Buffer | null
+        if (chunk === null) {
+            // We reached the end prematurely.
+            if (allowEndOfStream) {
+                return totalBytesRead === 0 ? null : result.subarray(0, totalBytesRead)
+            } else {
+                throw new Error(`End of stream encountered after only ${totalBytesRead} bytes when ${size} were expected.`);
             }
-
-            // now append new data to the buffer
-            newBuffer.copy(readBuffer, index);
-
-            size -= newBuffer.length;
-            index += newBuffer.length;
-        }
-
-        if (size > 0) {
-            const bytesAvailable = new Deferred<void>();
-            const bytesAvailableCallback = bytesAvailable.resolve.bind(bytesAvailable);
-            const streamEndedCallback = streamEnded.resolve.bind(streamEnded);
-            readable.once("readable", bytesAvailableCallback);
-            readable.once("end", streamEndedCallback);
-            try {
-                const endPromise = Promise.race([bytesAvailable.promise, streamEnded.promise]);
-                await (cancellationToken ? cancellationToken.racePromise(endPromise) : endPromise);
-            } finally {
-                readable.removeListener("readable", bytesAvailableCallback);
-                readable.removeListener("end", streamEndedCallback);
-            }
-        }
+        }
+
+        chunk.copy(result, totalBytesRead);
+        totalBytesRead += chunk.length;
     }
 
-    return readBuffer;
+    return result;
 }
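getBufferFrom is now two paths: a fast path that returns a single read(size) when the bytes are already buffered, and a slow path that accumulates exactly `size` bytes through a sliceStream. For example (a sketch):

    import { PassThrough } from "stream";
    import { getBufferFrom } from "./Utilities";

    async function example(): Promise<void> {
        const source = new PassThrough();
        source.write(Buffer.from([1, 2]));
        setTimeout(() => source.end(Buffer.from([3, 4])), 10);

        // Waits across chunk boundaries until all four requested bytes arrive.
        console.log(await getBufferFrom(source, 4)); // <Buffer 01 02 03 04>

        // With allowEndOfStream, a premature end returns the bytes that were
        // available (or null if there were none) instead of throwing.
        const short = new PassThrough();
        short.end(Buffer.from([9]));
        console.log(await getBufferFrom(short, 4, true)); // <Buffer 09>
    }

    example();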

export function throwIfDisposed(value: IDisposableObservable) {
24 changes: 15 additions & 9 deletions src/nerdbank-streams/src/tests/FullDuplexStream.spec.ts
@@ -1,7 +1,7 @@
 import { PassThrough, Readable, Writable } from "stream";
 import { Deferred } from "../Deferred";
 import { FullDuplexStream } from "../FullDuplexStream";
-import { getBufferFrom } from "../Utilities";
+import { getBufferFrom, readAsync } from "../Utilities";
 import { delay } from "./Timeout";
 
 describe("FullDuplexStream.CreatePair", () => {
@@ -24,20 +24,20 @@ describe("FullDuplexStream.CreatePair", () => {
         await endPropagatesEndEvent(pair.second, pair.first);
     });
 
-    it("stream1 write end leads to stream2 finish event", async () => {
+    it("stream1 write end leads to stream1 finish event", async () => {
         const pair = FullDuplexStream.CreatePair();
-        await endPropagatesFinishEvent(pair.first, pair.second);
-        await endPropagatesFinishEvent(pair.second, pair.first);
+        await endRaisesFinishEvent(pair.first);
+        await endRaisesFinishEvent(pair.second);
     });
 
     async function writePropagation(first: Writable, second: Readable): Promise<void> {
         first.write("abc");
-        expect(second.read()).toEqual(Buffer.from("abc"));
+        expect(await readAsync(second)).toEqual(Buffer.from("abc"));
     }
 
-    async function endPropagatesFinishEvent(first: Writable, second: Readable): Promise<void> {
+    async function endRaisesFinishEvent(first: Writable): Promise<void> {
         const signal = new Deferred<void>();
-        second.once("finish", () => {
+        first.once("finish", () => {
             signal.resolve();
         });
         expect(signal.isCompleted).toBe(false);
@@ -63,8 +63,8 @@ describe("FullDuplexStream.Splice", () => {
     let duplex: NodeJS.ReadWriteStream;
 
     beforeEach(() => {
-        readable = new PassThrough({ writableHighWaterMark : 8 });
-        writable = new PassThrough({ writableHighWaterMark : 8 });
+        readable = new PassThrough({ writableHighWaterMark: 8 });
+        writable = new PassThrough({ writableHighWaterMark: 8 });
         duplex = FullDuplexStream.Splice(readable, writable);
     });
 
@@ -88,6 +88,12 @@
         expect(buffer).toBeNull();
     });
 
+    it("unshift", async () => {
+        duplex.unshift(Buffer.from([1, 2, 3]))
+        const result = duplex.read()
+        expect(result).toEqual(Buffer.from([1, 2, 3]))
+    })
+
     it("Read should yield when data is not ready", async () => {
         const task = writeToStream(duplex, "abcdefgh", 4);
         const buffer = await getBufferFrom(writable, 32);
26 changes: 2 additions & 24 deletions src/nerdbank-streams/src/tests/MultiplexingStream.Interop.spec.ts
@@ -4,6 +4,7 @@ import { Deferred } from "../Deferred";
 import { FullDuplexStream } from "../FullDuplexStream";
 import { MultiplexingStream } from "../MultiplexingStream";
 import { ChannelOptions } from "../ChannelOptions";
+import { readAsync } from "../Utilities";
 
 [1, 2, 3].forEach(protocolMajorVersion => {
     describe(`MultiplexingStream v${protocolMajorVersion} (interop) `, () => {
@@ -101,34 +102,11 @@ import { ChannelOptions } from "../ChannelOptions";
             return deferred.promise;
         }
 
-        async function readAsync(readable: NodeJS.ReadableStream): Promise<Buffer | null> {
-            let readBuffer = readable.read() as Buffer;
-
-            if (readBuffer === null) {
-                const bytesAvailable = new Deferred<void>();
-                const streamEnded = new Deferred<void>();
-                const bytesAvailableCallback = bytesAvailable.resolve.bind(bytesAvailable);
-                const streamEndedCallback = streamEnded.resolve.bind(streamEnded);
-                readable.once("readable", bytesAvailableCallback);
-                readable.once("end", streamEndedCallback);
-                await Promise.race([bytesAvailable.promise, streamEnded.promise]);
-                readable.removeListener("readable", bytesAvailableCallback);
-                readable.removeListener("end", streamEndedCallback);
-                if (bytesAvailable.isCompleted) {
-                    readBuffer = readable.read() as Buffer;
-                } else {
-                    return null;
-                }
-            }
-
-            return readBuffer;
-        }
-
         async function readLineAsync(readable: NodeJS.ReadableStream): Promise<string | null> {
             const buffers: Buffer[] = [];
 
             while (true) {
-                const segment = await readAsync(readable);
+                const segment = await readAsync(readable) as Buffer | null;
                 if (segment === null) {
                     break;
                 }
10 changes: 4 additions & 6 deletions src/nerdbank-streams/src/tests/Substream.spec.ts
@@ -134,12 +134,8 @@ describe("Substream", () => {
         await endAsync(thru);
 
         const substream = readSubstream(thru);
-        let readPayload = await getBufferFrom(substream, payload1.length);
-        expect(readPayload).toEqual(payload1);
-        readPayload = await getBufferFrom(substream, payload2.length);
-        expect(readPayload).toEqual(payload2);
-
-        await expectEndOfStream(substream);
+        const readPayload = await getBufferFrom(substream, 10, true);
+        expect(readPayload).toEqual(Buffer.from([1, 2, 3, 4, 5, 6]));
         await expectEndOfStream(thru);
     });
 
@@ -187,6 +183,8 @@
     }
 
     function expectEndOfStream(stream: NodeJS.ReadableStream): Promise<void> {
+        expect(stream.read()).toBeNull()
+        stream.resume();
         return new Promise<void>((resolve, reject) => {
             stream.once("end", () => resolve());
             stream.once("data", () => reject(new Error('EOF expected.')));