stream: handle generator destruction from Duplex.from()
matthieusieben committed Sep 24, 2024
1 parent 3c5ceff commit c77637d
Showing 2 changed files with 181 additions and 8 deletions.
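For orientation (not part of the commit itself), here is a minimal standalone sketch, adapted from the new tests below, of the consumer-facing behavior being fixed: finishing the async generator passed to Duplex.from() early via return() should destroy the upstream readable and let pipeline() settle cleanly instead of hanging. The variable name `source` is illustrative.

'use strict';
const assert = require('assert');
const { Duplex, Readable, pipeline } = require('stream');

const source = Readable.from(['foo', 'bar', 'baz']);

pipeline(
  source,
  Duplex.from(async function(asyncGenerator) {
    // Read a single chunk, then finish the generator early.
    const first = await asyncGenerator.next();
    assert.strictEqual(first.done, false);
    assert.strictEqual(first.value.toString(), 'foo');
    await asyncGenerator.return();
  }),
  (err) => {
    // With this change, the early return destroys the source and the
    // pipeline completes without an error instead of stalling.
    assert.ifError(err);
    assert.strictEqual(source.destroyed, true);
  }
);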
55 changes: 47 additions & 8 deletions lib/internal/streams/duplexify.js
@@ -83,15 +83,19 @@ module.exports = function duplexify(body, name) {
   }
 
   if (typeof body === 'function') {
-    const { value, write, final, destroy } = fromAsyncGen(body);
+    let d;
+
+    const { value, write, final, destroy } = fromAsyncGen(body, () => {
+      if (d) destroyer(d);
+    });
 
     // Body might be a constructor function instead of an async generator function.
     if (isDuplexNodeStream(value)) {
-      return value;
+      return d = value;
     }
 
     if (isIterable(value)) {
-      return from(Duplexify, value, {
+      return d = from(Duplexify, value, {
         // TODO (ronag): highWaterMark?
         objectMode: true,
         write,
@@ -102,12 +106,11 @@ module.exports = function duplexify(body, name) {
 
     const then = value?.then;
     if (typeof then === 'function') {
-      let d;
-
       const promise = FunctionPrototypeCall(
         then,
         value,
         (val) => {
+          destroyer(d);
           if (val != null) {
             throw new ERR_INVALID_RETURN_VALUE('nully', 'body', val);
           }
@@ -208,11 +211,12 @@ module.exports = function duplexify(body, name) {
     body);
 };
 
-function fromAsyncGen(fn) {
+function fromAsyncGen(fn, destructor) {
   let { promise, resolve } = createDeferredPromise();
   const ac = new AbortController();
   const signal = ac.signal;
-  const value = fn(async function*() {
+
+  const asyncGenerator = (async function* () {
     while (true) {
       const _promise = promise;
       promise = null;
@@ -222,9 +226,44 @@ function fromAsyncGen(fn) {
       if (signal.aborted)
         throw new AbortError(undefined, { cause: signal.reason });
       ({ promise, resolve } = createDeferredPromise());
+      // Next line will "break" the loop if the generator is returned/thrown.
       yield chunk;
     }
-  }(), { signal });
+  })();
+
+  const originalReturn = asyncGenerator.return;
+  asyncGenerator.return = async function(value) {
+    try {
+      return await originalReturn.call(this, value);
+    } finally {
+      if (promise) {
+        const _promise = promise;
+        promise = null;
+        const { cb } = await _promise;
+        process.nextTick(cb);
+
+        destructor();
+      }
+    }
+  };
+
+  const originalThrow = asyncGenerator.throw;
+  asyncGenerator.throw = async function(err) {
+    try {
+      return await originalThrow.call(this, err);
+    } finally {
+      if (promise) {
+        const _promise = promise;
+        promise = null;
+        const { cb } = await _promise;
+
+        // asyncGenerator.throw(undefined) should cause a callback error
+        process.nextTick(cb, err ?? new AbortError());
+      }
+    }
+  };
+
+  const value = fn(asyncGenerator, { signal });
+
   return {
     value,
134 changes: 134 additions & 0 deletions test/parallel/test-stream-duplex-from.js
@@ -5,6 +5,7 @@ const assert = require('assert');
 const { Duplex, Readable, Writable, pipeline, PassThrough } = require('stream');
 const { ReadableStream, WritableStream } = require('stream/web');
 const { Blob } = require('buffer');
+const sleep = require('util').promisify(setTimeout);
 
 {
   const d = Duplex.from({
@@ -401,3 +402,136 @@ function makeATestWritableStream(writeFunc) {
     assert.strictEqual(d.writable, false);
   }));
 }
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      const values = await Array.fromAsync(asyncGenerator);
+      assert.deepStrictEqual(values, ['foo', 'bar', 'baz']);
+
+      await asyncGenerator.return();
+      await asyncGenerator.return();
+      await asyncGenerator.return();
+    }),
+    common.mustSucceed(() => {
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      // eslint-disable-next-line no-unused-vars
+      for await (const _ of asyncGenerator) break;
+    }),
+    common.mustSucceed(() => {
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      const a = await asyncGenerator.next();
+      assert.strictEqual(a.done, false);
+      assert.strictEqual(a.value.toString(), 'foo');
+      const b = await asyncGenerator.return();
+      assert.strictEqual(b.done, true);
+    }),
+    common.mustSucceed(() => {
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      // Note: the generator is not even started at this point
+      await asyncGenerator.return();
+    }),
+    common.mustSucceed(() => {
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      // Same as before, with a delay
+      await sleep(100);
+      await asyncGenerator.return();
+    }),
+    common.mustSucceed(() => {
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {}),
+    common.mustCall((err) => {
+      assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE');
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar', 'baz']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      await sleep(100);
+    }),
+    common.mustCall((err) => {
+      assert.strictEqual(err.code, 'ERR_STREAM_PREMATURE_CLOSE');
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      await asyncGenerator.throw(new Error('my error'));
+    }),
+    common.mustCall((err) => {
+      assert.strictEqual(err.message, 'my error');
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
+
+{
+  const r = Readable.from(['foo', 'bar']);
+  pipeline(
+    r,
+    Duplex.from(async function(asyncGenerator) {
+      await asyncGenerator.next();
+      await asyncGenerator.throw(new Error('my error'));
+    }),
+    common.mustCall((err) => {
+      assert.strictEqual(err.message, 'my error');
+      assert.strictEqual(r.destroyed, true);
+    })
+  );
+}
